Browse Source

初始化项目

zhuizhu 10 tháng trước cách đây
commit
68df762cb0
100 tập tin đã thay đổi với 7755 bổ sung0 xóa
  1. 5 0
      .gitignore
  2. 9 0
      .vscode/settings.json
  3. 42 0
      AvRecorder/AvRecorder.pri
  4. 359 0
      AvRecorder/audioinput.cpp
  5. 138 0
      AvRecorder/audioinput.h
  6. 4 0
      AvRecorder/basic/basic.cpp
  7. 84 0
      AvRecorder/basic/basic.h
  8. 102 0
      AvRecorder/basic/frame.cpp
  9. 78 0
      AvRecorder/basic/frame.h
  10. 88 0
      AvRecorder/basic/timer.h
  11. 9 0
      AvRecorder/capturer/audio/audio.pri
  12. 189 0
      AvRecorder/capturer/audio/audio_capturer.cpp
  13. 51 0
      AvRecorder/capturer/audio/audio_capturer.h
  14. 465 0
      AvRecorder/capturer/audio/audio_qt_capturer.cpp
  15. 79 0
      AvRecorder/capturer/audio/audio_qt_capturer.h
  16. 1 0
      AvRecorder/capturer/audio/iaudiocapturer.cpp
  17. 61 0
      AvRecorder/capturer/audio/iaudiocapturer.h
  18. 48 0
      AvRecorder/capturer/base_capturer.h
  19. 9 0
      AvRecorder/capturer/capturer.pri
  20. 98 0
      AvRecorder/capturer/finder.cpp
  21. 43 0
      AvRecorder/capturer/finder.h
  22. 0 0
      AvRecorder/capturer/ivideocapturer.cpp
  23. 276 0
      AvRecorder/capturer/video/dxgi_capturer.cpp
  24. 27 0
      AvRecorder/capturer/video/dxgi_capturer.h
  25. 56 0
      AvRecorder/capturer/video/gdi_capturer.cpp
  26. 25 0
      AvRecorder/capturer/video/gdi_capturer.h
  27. 3 0
      AvRecorder/capturer/video/ivideocapturer.cpp
  28. 12 0
      AvRecorder/capturer/video/ivideocapturer.h
  29. 25 0
      AvRecorder/capturer/video/video.pri
  30. 145 0
      AvRecorder/capturer/video/video_capturer.cpp
  31. 46 0
      AvRecorder/capturer/video/video_capturer.h
  32. 88 0
      AvRecorder/capturer/video/wgc/App.cpp
  33. 46 0
      AvRecorder/capturer/video/wgc/App.h
  34. 175 0
      AvRecorder/capturer/video/wgc/SimpleCapture.cpp
  35. 62 0
      AvRecorder/capturer/video/wgc/SimpleCapture.h
  36. 24 0
      AvRecorder/capturer/video/wgc/capture.interop.h
  37. 61 0
      AvRecorder/capturer/video/wgc/composition.interop.h
  38. 173 0
      AvRecorder/capturer/video/wgc/d3dHelpers.h
  39. 40 0
      AvRecorder/capturer/video/wgc/direct3d11.interop.h
  40. 9 0
      AvRecorder/capturer/video/wgc/pch.cpp
  41. 34 0
      AvRecorder/capturer/video/wgc/pch.h
  42. 32 0
      AvRecorder/capturer/video/wgc/winrt.cpp
  43. 24 0
      AvRecorder/capturer/video/wgc/winrt.h
  44. 97 0
      AvRecorder/capturer/video/wgc_capturer.cpp
  45. 35 0
      AvRecorder/capturer/video/wgc_capturer.h
  46. 60 0
      AvRecorder/d3d/buffer_filler.cpp
  47. 24 0
      AvRecorder/d3d/buffer_filler.h
  48. 131 0
      AvRecorder/d3d/convert.cpp
  49. 80 0
      AvRecorder/d3d/convert.h
  50. 81 0
      AvRecorder/d3d/gen_frame.cpp
  51. 14 0
      AvRecorder/d3d/gen_frame.h
  52. 14 0
      AvRecorder/encoder/abstract_encoder.cpp
  53. 31 0
      AvRecorder/encoder/abstract_encoder.h
  54. 52 0
      AvRecorder/encoder/audio_encoder.cpp
  55. 24 0
      AvRecorder/encoder/audio_encoder.h
  56. 330 0
      AvRecorder/encoder/audio_mixer.cpp
  57. 100 0
      AvRecorder/encoder/audio_mixer.h
  58. 200 0
      AvRecorder/encoder/video_encoder.cpp
  59. 47 0
      AvRecorder/encoder/video_encoder.h
  60. 12 0
      AvRecorder/main.cpp
  61. 152 0
      AvRecorder/muxer/av_muxer.cpp
  62. 43 0
      AvRecorder/muxer/av_muxer.h
  63. 137 0
      AvRecorder/recorder/audio_recorder.cpp
  64. 50 0
      AvRecorder/recorder/audio_recorder.h
  65. 107 0
      AvRecorder/recorder/video_recorder.cpp
  66. 43 0
      AvRecorder/recorder/video_recorder.h
  67. 4 0
      AvRecorder/test/basic/basic.cpp
  68. 85 0
      AvRecorder/test/basic/basic.h
  69. 102 0
      AvRecorder/test/basic/frame.cpp
  70. 78 0
      AvRecorder/test/basic/frame.h
  71. 88 0
      AvRecorder/test/basic/timer.h
  72. 182 0
      AvRecorder/test/capturer/audio_capturer.cpp
  73. 53 0
      AvRecorder/test/capturer/audio_capturer.h
  74. 226 0
      AvRecorder/test/capturer/dxgi_capturer.cpp
  75. 39 0
      AvRecorder/test/capturer/dxgi_capturer.h
  76. 98 0
      AvRecorder/test/capturer/finder.cpp
  77. 43 0
      AvRecorder/test/capturer/finder.h
  78. 56 0
      AvRecorder/test/capturer/gdi_capturer.cpp
  79. 26 0
      AvRecorder/test/capturer/gdi_capturer.h
  80. 145 0
      AvRecorder/test/capturer/video_capturer.cpp
  81. 46 0
      AvRecorder/test/capturer/video_capturer.h
  82. 88 0
      AvRecorder/test/capturer/wgc/App.cpp
  83. 46 0
      AvRecorder/test/capturer/wgc/App.h
  84. 175 0
      AvRecorder/test/capturer/wgc/SimpleCapture.cpp
  85. 62 0
      AvRecorder/test/capturer/wgc/SimpleCapture.h
  86. 24 0
      AvRecorder/test/capturer/wgc/capture.interop.h
  87. 61 0
      AvRecorder/test/capturer/wgc/composition.interop.h
  88. 173 0
      AvRecorder/test/capturer/wgc/d3dHelpers.h
  89. 40 0
      AvRecorder/test/capturer/wgc/direct3d11.interop.h
  90. 9 0
      AvRecorder/test/capturer/wgc/pch.cpp
  91. 34 0
      AvRecorder/test/capturer/wgc/pch.h
  92. 32 0
      AvRecorder/test/capturer/wgc/winrt.cpp
  93. 24 0
      AvRecorder/test/capturer/wgc/winrt.h
  94. 96 0
      AvRecorder/test/capturer/wgc_capturer.cpp
  95. 35 0
      AvRecorder/test/capturer/wgc_capturer.h
  96. 60 0
      AvRecorder/test/d3d/buffer_filler.cpp
  97. 24 0
      AvRecorder/test/d3d/buffer_filler.h
  98. 131 0
      AvRecorder/test/d3d/convert.cpp
  99. 80 0
      AvRecorder/test/d3d/convert.h
  100. 81 0
      AvRecorder/test/d3d/gen_frame.cpp

+ 5 - 0
.gitignore

@@ -0,0 +1,5 @@
+/build
+/bin/LearningSmartClientd.ilk
+/bin/LearningSmartClientd.pdb
+/bin/LearningSmartClientd.exe
+/*.user

+ 9 - 0
.vscode/settings.json

@@ -0,0 +1,9 @@
+{
+  "files.associations": {
+    "*.lua": "lua",
+    "*.tcc": "cpp",
+    "optional": "cpp",
+    "system_error": "cpp",
+    "regex": "cpp"
+  }
+}

+ 42 - 0
AvRecorder/AvRecorder.pri

@@ -0,0 +1,42 @@
+INCLUDEPATH += $$PWD
+HEADERS += \
+    $$PWD/basic/basic.h \
+    $$PWD/basic/frame.h \
+    $$PWD/basic/timer.h \
+    $$PWD/d3d/buffer_filler.h \
+    $$PWD/d3d/convert.h \
+    $$PWD/d3d/gen_frame.h \
+    $$PWD/encoder/abstract_encoder.h \
+    $$PWD/encoder/audio_encoder.h \
+    $$PWD/encoder/audio_mixer.h \
+    $$PWD/encoder/video_encoder.h \
+    $$PWD/muxer/av_muxer.h \
+    $$PWD/recorder/audio_recorder.h \
+    $$PWD/recorder/video_recorder.h \
+    $$PWD/ui/audio_render.h \
+    $$PWD/ui/audio_widget.h \
+    $$PWD/ui/av_recorder.h \
+    $$PWD/ui/opengl_video_widget.h \
+    $$PWD/ui/settings_page.h
+
+SOURCES += \
+    $$PWD/basic/basic.cpp \
+    $$PWD/basic/frame.cpp \
+    $$PWD/d3d/buffer_filler.cpp \
+    $$PWD/d3d/convert.cpp \
+    $$PWD/d3d/gen_frame.cpp \
+    $$PWD/encoder/abstract_encoder.cpp \
+    $$PWD/encoder/audio_encoder.cpp \
+    $$PWD/encoder/audio_mixer.cpp \
+    $$PWD/encoder/video_encoder.cpp \
+    $$PWD/muxer/av_muxer.cpp \
+    $$PWD/recorder/audio_recorder.cpp \
+    $$PWD/recorder/video_recorder.cpp \
+    $$PWD/ui/audio_render.cpp \
+    $$PWD/ui/audio_widget.cpp \
+    $$PWD/ui/av_recorder.cpp \
+    $$PWD/ui/opengl_video_widget.cpp \
+    $$PWD/ui/settings_page.cpp
+
+
+include($$PWD/capturer/capturer.pri)

+ 359 - 0
AvRecorder/audioinput.cpp

@@ -0,0 +1,359 @@
+/****************************************************************************
+**
+** Copyright (C) 2017 The Qt Company Ltd.
+** Contact: https://www.qt.io/licensing/
+**
+** This file is part of the examples of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:BSD$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and The Qt Company. For licensing terms
+** and conditions see https://www.qt.io/terms-conditions. For further
+** information use the contact form at https://www.qt.io/contact-us.
+**
+** BSD License Usage
+** Alternatively, you may use this file under the terms of the BSD license
+** as follows:
+**
+** "Redistribution and use in source and binary forms, with or without
+** modification, are permitted provided that the following conditions are
+** met:
+**   * Redistributions of source code must retain the above copyright
+**     notice, this list of conditions and the following disclaimer.
+**   * Redistributions in binary form must reproduce the above copyright
+**     notice, this list of conditions and the following disclaimer in
+**     the documentation and/or other materials provided with the
+**     distribution.
+**   * Neither the name of The Qt Company Ltd nor the names of its
+**     contributors may be used to endorse or promote products derived
+**     from this software without specific prior written permission.
+**
+**
+** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+** "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+** LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+** A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+** OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+** SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+** LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+** DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+** THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+** (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+** OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "audioinput.h"
+
+#include <stdlib.h>
+#include <math.h>
+
+#include <QDateTime>
+#include <QDebug>
+#include <QPainter>
+#include <QVBoxLayout>
+#include <QAudioDeviceInfo>
+#include <QAudioInput>
+#include <qendian.h>
+
+AudioInfo::AudioInfo(const QAudioFormat &format)
+    : m_format(format)
+{
+    switch (m_format.sampleSize()) {
+    case 8:
+        switch (m_format.sampleType()) {
+        case QAudioFormat::UnSignedInt:
+            m_maxAmplitude = 255;
+            break;
+        case QAudioFormat::SignedInt:
+            m_maxAmplitude = 127;
+            break;
+        default:
+            break;
+        }
+        break;
+    case 16:
+        switch (m_format.sampleType()) {
+        case QAudioFormat::UnSignedInt:
+            m_maxAmplitude = 65535;
+            break;
+        case QAudioFormat::SignedInt:
+            m_maxAmplitude = 32767;
+            break;
+        default:
+            break;
+        }
+        break;
+
+    case 32:
+        switch (m_format.sampleType()) {
+        case QAudioFormat::UnSignedInt:
+            m_maxAmplitude = 0xffffffff;
+            break;
+        case QAudioFormat::SignedInt:
+            m_maxAmplitude = 0x7fffffff;
+            break;
+        case QAudioFormat::Float:
+            m_maxAmplitude = 0x7fffffff; // Kind of
+        default:
+            break;
+        }
+        break;
+
+    default:
+        break;
+    }
+}
+
+void AudioInfo::start()
+{
+    open(QIODevice::WriteOnly);
+}
+
+void AudioInfo::stop()
+{
+    close();
+}
+
+qint64 AudioInfo::readData(char *data, qint64 maxlen)
+{
+    Q_UNUSED(data)
+    Q_UNUSED(maxlen)
+
+    return 0;
+}
+
+qint64 AudioInfo::writeData(const char *data, qint64 len)
+{
+    if (m_maxAmplitude) {
+        Q_ASSERT(m_format.sampleSize() % 8 == 0);
+        const int channelBytes = m_format.sampleSize() / 8;
+        const int sampleBytes = m_format.channelCount() * channelBytes;
+        Q_ASSERT(len % sampleBytes == 0);
+        const int numSamples = len / sampleBytes;
+
+        quint32 maxValue = 0;
+        const unsigned char *ptr = reinterpret_cast<const unsigned char *>(data);
+
+        for (int i = 0; i < numSamples; ++i) {
+            for (int j = 0; j < m_format.channelCount(); ++j) {
+                quint32 value = 0;
+
+                if (m_format.sampleSize() == 8 && m_format.sampleType() == QAudioFormat::UnSignedInt) {
+                    value = *reinterpret_cast<const quint8*>(ptr);
+                } else if (m_format.sampleSize() == 8 && m_format.sampleType() == QAudioFormat::SignedInt) {
+                    value = qAbs(*reinterpret_cast<const qint8*>(ptr));
+                } else if (m_format.sampleSize() == 16 && m_format.sampleType() == QAudioFormat::UnSignedInt) {
+                    if (m_format.byteOrder() == QAudioFormat::LittleEndian)
+                        value = qFromLittleEndian<quint16>(ptr);
+                    else
+                        value = qFromBigEndian<quint16>(ptr);
+                } else if (m_format.sampleSize() == 16 && m_format.sampleType() == QAudioFormat::SignedInt) {
+                    if (m_format.byteOrder() == QAudioFormat::LittleEndian)
+                        value = qAbs(qFromLittleEndian<qint16>(ptr));
+                    else
+                        value = qAbs(qFromBigEndian<qint16>(ptr));
+                } else if (m_format.sampleSize() == 32 && m_format.sampleType() == QAudioFormat::UnSignedInt) {
+                    if (m_format.byteOrder() == QAudioFormat::LittleEndian)
+                        value = qFromLittleEndian<quint32>(ptr);
+                    else
+                        value = qFromBigEndian<quint32>(ptr);
+                } else if (m_format.sampleSize() == 32 && m_format.sampleType() == QAudioFormat::SignedInt) {
+                    if (m_format.byteOrder() == QAudioFormat::LittleEndian)
+                        value = qAbs(qFromLittleEndian<qint32>(ptr));
+                    else
+                        value = qAbs(qFromBigEndian<qint32>(ptr));
+                } else if (m_format.sampleSize() == 32 && m_format.sampleType() == QAudioFormat::Float) {
+                    value = qAbs(*reinterpret_cast<const float*>(ptr) * 0x7fffffff); // assumes 0-1.0
+                }
+
+                maxValue = qMax(value, maxValue);
+                ptr += channelBytes;
+            }
+        }
+
+        maxValue = qMin(maxValue, m_maxAmplitude);
+        m_level = qreal(maxValue) / m_maxAmplitude;
+    }
+
+    emit update();
+    return len;
+}
+
+RenderArea::RenderArea(QWidget *parent)
+    : QWidget(parent)
+{
+    setBackgroundRole(QPalette::Base);
+    setAutoFillBackground(true);
+
+    setMinimumHeight(30);
+    setMinimumWidth(200);
+}
+
+void RenderArea::paintEvent(QPaintEvent * /* event */)
+{
+    QPainter painter(this);
+
+    painter.setPen(Qt::black);
+    painter.drawRect(QRect(painter.viewport().left()+10,
+                           painter.viewport().top()+10,
+                           painter.viewport().right()-20,
+                           painter.viewport().bottom()-20));
+    if (m_level == 0.0)
+        return;
+
+    int pos = ((painter.viewport().right()-20)-(painter.viewport().left()+11))*m_level;
+    painter.fillRect(painter.viewport().left()+11,
+                     painter.viewport().top()+10,
+                     pos,
+                     painter.viewport().height()-21,
+                     Qt::red);
+}
+
+void RenderArea::setLevel(qreal value)
+{
+    m_level = value;
+    update();
+}
+
+
+InputTest::InputTest()
+{
+    initializeWindow();
+    initializeAudio(QAudioDeviceInfo::defaultInputDevice());
+}
+
+
+void InputTest::initializeWindow()
+{
+    QWidget *window = new QWidget;
+    QVBoxLayout *layout = new QVBoxLayout;
+
+    m_canvas = new RenderArea(this);
+    layout->addWidget(m_canvas);
+
+    m_deviceBox = new QComboBox(this);
+    const QAudioDeviceInfo &defaultDeviceInfo = QAudioDeviceInfo::defaultInputDevice();
+    m_deviceBox->addItem(defaultDeviceInfo.deviceName(), QVariant::fromValue(defaultDeviceInfo));
+    for (auto &deviceInfo: QAudioDeviceInfo::availableDevices(QAudio::AudioInput)) {
+        if (deviceInfo != defaultDeviceInfo)
+            m_deviceBox->addItem(deviceInfo.deviceName(), QVariant::fromValue(deviceInfo));
+    }
+
+    connect(m_deviceBox, QOverload<int>::of(&QComboBox::activated), this, &InputTest::deviceChanged);
+    layout->addWidget(m_deviceBox);
+
+    m_volumeSlider = new QSlider(Qt::Horizontal, this);
+    m_volumeSlider->setRange(0, 100);
+    m_volumeSlider->setValue(100);
+    connect(m_volumeSlider, &QSlider::valueChanged, this, &InputTest::sliderChanged);
+    layout->addWidget(m_volumeSlider);
+
+    m_modeButton = new QPushButton(this);
+    connect(m_modeButton, &QPushButton::clicked, this, &InputTest::toggleMode);
+    layout->addWidget(m_modeButton);
+
+    m_suspendResumeButton = new QPushButton(this);
+    connect(m_suspendResumeButton, &QPushButton::clicked, this, &InputTest::toggleSuspend);
+    layout->addWidget(m_suspendResumeButton);
+
+    window->setLayout(layout);
+
+    setCentralWidget(window);
+    window->show();
+}
+
+void InputTest::initializeAudio(const QAudioDeviceInfo &deviceInfo)
+{
+    QAudioFormat format;
+    format.setSampleRate(8000);
+    format.setChannelCount(1);
+    format.setSampleSize(16);
+    format.setSampleType(QAudioFormat::SignedInt);
+    format.setByteOrder(QAudioFormat::LittleEndian);
+    format.setCodec("audio/pcm");
+
+    if (!deviceInfo.isFormatSupported(format)) {
+        qWarning() << "Default format not supported - trying to use nearest";
+        format = deviceInfo.nearestFormat(format);
+    }
+
+    m_audioInfo.reset(new AudioInfo(format));
+    connect(m_audioInfo.data(), &AudioInfo::update, [this]() {
+        m_canvas->setLevel(m_audioInfo->level());
+    });
+
+    m_audioInput.reset(new QAudioInput(deviceInfo, format));
+    qreal initialVolume = QAudio::convertVolume(m_audioInput->volume(),
+                                                QAudio::LinearVolumeScale,
+                                                QAudio::LogarithmicVolumeScale);
+    m_volumeSlider->setValue(qRound(initialVolume * 100));
+    m_audioInfo->start();
+    toggleMode();
+}
+
+void InputTest::toggleMode()
+{
+    m_audioInput->stop();
+    toggleSuspend();
+
+    // Change bewteen pull and push modes
+    if (m_pullMode) {
+        m_modeButton->setText(tr("Enable push mode"));
+        m_audioInput->start(m_audioInfo.data());
+    } else {
+        m_modeButton->setText(tr("Enable pull mode"));
+        auto io = m_audioInput->start();
+        connect(io, &QIODevice::readyRead,
+            [&, io]() {
+                qint64 len = m_audioInput->bytesReady();
+                const int BufferSize = 4096;
+                if (len > BufferSize)
+                    len = BufferSize;
+
+                QByteArray buffer(len, 0);
+                qint64 l = io->read(buffer.data(), len);
+                if (l > 0)
+                    m_audioInfo->write(buffer.constData(), l);
+            });
+    }
+
+    m_pullMode = !m_pullMode;
+}
+
+void InputTest::toggleSuspend()
+{
+    // toggle suspend/resume
+    if (m_audioInput->state() == QAudio::SuspendedState || m_audioInput->state() == QAudio::StoppedState) {
+        m_audioInput->resume();
+        m_suspendResumeButton->setText(tr("Suspend recording"));
+    } else if (m_audioInput->state() == QAudio::ActiveState) {
+        m_audioInput->suspend();
+        m_suspendResumeButton->setText(tr("Resume recording"));
+    } else if (m_audioInput->state() == QAudio::IdleState) {
+        // no-op
+    }
+}
+
+void InputTest::deviceChanged(int index)
+{
+    m_audioInfo->stop();
+    m_audioInput->stop();
+    m_audioInput->disconnect(this);
+
+    initializeAudio(m_deviceBox->itemData(index).value<QAudioDeviceInfo>());
+}
+
+void InputTest::sliderChanged(int value)
+{
+    qreal linearVolume = QAudio::convertVolume(value / qreal(100),
+                                               QAudio::LogarithmicVolumeScale,
+                                               QAudio::LinearVolumeScale);
+
+    m_audioInput->setVolume(linearVolume);
+}

+ 138 - 0
AvRecorder/audioinput.h

@@ -0,0 +1,138 @@
+/****************************************************************************
+**
+** Copyright (C) 2017 The Qt Company Ltd.
+** Contact: https://www.qt.io/licensing/
+**
+** This file is part of the examples of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:BSD$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and The Qt Company. For licensing terms
+** and conditions see https://www.qt.io/terms-conditions. For further
+** information use the contact form at https://www.qt.io/contact-us.
+**
+** BSD License Usage
+** Alternatively, you may use this file under the terms of the BSD license
+** as follows:
+**
+** "Redistribution and use in source and binary forms, with or without
+** modification, are permitted provided that the following conditions are
+** met:
+**   * Redistributions of source code must retain the above copyright
+**     notice, this list of conditions and the following disclaimer.
+**   * Redistributions in binary form must reproduce the above copyright
+**     notice, this list of conditions and the following disclaimer in
+**     the documentation and/or other materials provided with the
+**     distribution.
+**   * Neither the name of The Qt Company Ltd nor the names of its
+**     contributors may be used to endorse or promote products derived
+**     from this software without specific prior written permission.
+**
+**
+** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+** "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+** LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+** A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+** OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+** SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+** LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+** DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+** THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+** (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+** OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#ifndef AUDIOINPUT_H
+#define AUDIOINPUT_H
+
+#include <QAudioInput>
+#include <QByteArray>
+#include <QComboBox>
+#include <QMainWindow>
+#include <QObject>
+#include <QPixmap>
+#include <QPushButton>
+#include <QSlider>
+#include <QWidget>
+#include <QScopedPointer>
+
+class AudioInfo : public QIODevice
+{
+    Q_OBJECT
+
+public:
+    AudioInfo(const QAudioFormat &format);
+
+    void start();
+    void stop();
+
+    qreal level() const { return m_level; }
+
+    qint64 readData(char *data, qint64 maxlen) override;
+    qint64 writeData(const char *data, qint64 len) override;
+
+private:
+    const QAudioFormat m_format;
+    quint32 m_maxAmplitude = 0;
+    qreal m_level = 0.0; // 0.0 <= m_level <= 1.0
+
+signals:
+    void update();
+};
+
+
+class RenderArea : public QWidget
+{
+    Q_OBJECT
+
+public:
+    explicit RenderArea(QWidget *parent = nullptr);
+
+    void setLevel(qreal value);
+
+protected:
+    void paintEvent(QPaintEvent *event) override;
+
+private:
+    qreal m_level = 0;
+    QPixmap m_pixmap;
+};
+
+
+class InputTest : public QMainWindow
+{
+    Q_OBJECT
+
+public:
+    InputTest();
+
+private:
+    void initializeWindow();
+    void initializeAudio(const QAudioDeviceInfo &deviceInfo);
+
+private slots:
+    void toggleMode();
+    void toggleSuspend();
+    void deviceChanged(int index);
+    void sliderChanged(int value);
+
+private:
+    // Owned by layout
+    RenderArea *m_canvas = nullptr;
+    QPushButton *m_modeButton = nullptr;
+    QPushButton *m_suspendResumeButton = nullptr;
+    QComboBox *m_deviceBox = nullptr;
+    QSlider *m_volumeSlider = nullptr;
+
+    QScopedPointer<AudioInfo> m_audioInfo;
+    QScopedPointer<QAudioInput> m_audioInput;
+    bool m_pullMode = true;
+};
+
+#endif // AUDIOINPUT_H

+ 4 - 0
AvRecorder/basic/basic.cpp

@@ -0,0 +1,4 @@
+
+#include "basic/basic.h"
+
+std::mutex __mtx;

+ 84 - 0
AvRecorder/basic/basic.h

@@ -0,0 +1,84 @@
+#ifndef __BASIC_FUCN_H__
+#define __BASIC_FUCN_H__
+#define __STDC_FORMAT_MACROS
+
+#include <functional>
+#include <mutex>
+#include <thread>
+
+extern "C" {
+#include <libavcodec/avcodec.h>
+#include <libavformat/avformat.h>
+}
+
+// ***************
+// MUTEX
+extern std::mutex __mtx;
+
+// ***************
+// debug function
+
+#define __AVDEBUG
+
+#ifdef __AVDEBUG
+#define __DebugPrint(fmtStr, ...) \
+    std::printf("[" __FILE__ ", line:%d] " fmtStr "\n", __LINE__, ##__VA_ARGS__)
+#define __Str(exp) #exp
+#define __Check(retVal, ...)                            \
+    do {                                                \
+        if (!(__VA_ARGS__)) {                           \
+            __DebugPrint(__Str(__VA_ARGS__) " failed"); \
+            return retVal;                              \
+        }                                               \
+    } while (false)
+
+#else
+#define __DebugPrint(fmtStr, ...)
+#define __Check(retVal, ...)  \
+    do {                      \
+        if (!(__VA_ARGS__)) { \
+            return retVal;    \
+        }                     \
+    } while (false)
+#endif
+
+#define __CheckNo(...) __Check(, __VA_ARGS__)
+#define __CheckBool(...) __Check(false, __VA_ARGS__)
+#define __CheckNullptr(...) __Check(nullptr, __VA_ARGS__)
+enum class MediaType {
+    AUDIO,
+    VIDEO
+};
+
+// ***************
+// memory function
+
+template <typename T, typename Func>
+void Free(T*& ptr, Func&& func)
+{
+    static_assert(std::is_convertible_v<Func, std::function<void()>>, "Type Func should be std::function<void()>");
+    if (ptr == nullptr) {
+        return;
+    }
+
+    func();
+    ptr = nullptr;
+}
+
+//***************
+// time function
+
+// Sleep x ms
+inline void SleepMs(int timeMs)
+{
+    std::this_thread::sleep_for(std::chrono::milliseconds(timeMs));
+}
+
+// 对于音频编码器的全局设置
+constexpr int AUDIO_SAMPLE_RATE = 48000;
+constexpr int AUDIO_CHANNEL = 1;
+constexpr AVSampleFormat AUDIO_FMT = AV_SAMPLE_FMT_FLTP;
+constexpr int MICROPHONE_INDEX = 0;
+constexpr int SPEAKER_INDEX = 1;
+
+#endif

+ 102 - 0
AvRecorder/basic/frame.cpp

@@ -0,0 +1,102 @@
+
+#include "basic/frame.h"
+
+extern "C" {
+#include <libswscale/swscale.h>
+}
+
+AVFrame* Frame<MediaType::AUDIO>::Alloc(AVSampleFormat sampleFmt,
+    const AVChannelLayout* channel_layout,
+    int sampleRate, int nbSamples)
+{
+    AVFrame* frame = nullptr;
+    __CheckNullptr(frame = av_frame_alloc());
+    frame->format = sampleFmt;
+    av_channel_layout_copy(&frame->ch_layout, channel_layout);
+    frame->sample_rate = sampleRate;
+    frame->nb_samples = nbSamples;
+
+    /* allocate the buffers for the frame data */
+    __CheckNullptr(av_frame_get_buffer(frame, 0) >= 0);
+    return frame;
+}
+
+Frame<MediaType::AUDIO>::Frame(AVSampleFormat sampleFmt,
+    const AVChannelLayout* channel_layout, int sampleRate,
+    int nbSamples)
+{
+    __CheckNo(frame = Alloc(sampleFmt, channel_layout, sampleRate, nbSamples));
+}
+
+Frame<MediaType::AUDIO>::Frame(AVFrame* frame)
+{
+    if (frame == nullptr) {
+        this->frame = nullptr;
+        return;
+    }
+    __CheckNo(this->frame = Alloc(AVSampleFormat(frame->format), &frame->ch_layout, frame->sample_rate, frame->nb_samples));
+    __CheckNo(av_frame_copy(this->frame, frame) >= 0);
+}
+
+Frame<MediaType::VIDEO>::Frame(AVPixelFormat pixFmt, int width, int height)
+{
+    __CheckNo(frame = Alloc(pixFmt, width, height));
+}
+
+AVFrame* Frame<MediaType::VIDEO>::Alloc(AVPixelFormat pixFmt, int width, int height)
+{
+    AVFrame* frame = nullptr;
+    __CheckNullptr(frame = av_frame_alloc());
+
+    frame->format = pixFmt;
+    frame->width = width;
+    frame->height = height;
+
+    /* allocate the buffers for the frame data */
+    __CheckNullptr(av_frame_get_buffer(frame, 0) >= 0);
+    return frame;
+}
+
+Frame<MediaType::VIDEO>::Frame(AVFrame* frame)
+{
+    if (frame == nullptr) {
+        this->frame = nullptr;
+        return;
+    }
+    __CheckNo(this->frame = Alloc(AVPixelFormat(frame->format), frame->width, frame->height));
+    __CheckNo(av_frame_copy(this->frame, frame) >= 0);
+}
+
+bool FfmpegConverter::SetSize(int width, int height)
+{
+    Free(_swsCtx, [this] { sws_freeContext(_swsCtx); });
+    Free(_frameTo, [this] { av_frame_free(&_frameTo); });
+    // 创建格式转换
+    __CheckBool(_swsCtx = sws_getContext(
+                    width, height, _from,
+                    width, height, _to,
+                    0, NULL, NULL, NULL));
+
+    __CheckBool(_frameTo = Frame<MediaType::VIDEO>::Alloc(_to, width, height));
+    return true;
+}
+
+AVFrame* FfmpegConverter::Trans(AVFrame* frameFrom)
+{
+    // 如果是空指针,直接把缓存返回
+    if (frameFrom == nullptr) {
+        return _frameTo;
+    }
+    __CheckNullptr(
+        sws_scale(_swsCtx, (const uint8_t* const*)frameFrom->data,
+            frameFrom->linesize, 0, frameFrom->height, _frameTo->data,
+            _frameTo->linesize)
+        >= 0);
+    return _frameTo;
+}
+
+FfmpegConverter::~FfmpegConverter()
+{
+    Free(_swsCtx, [this] { sws_freeContext(_swsCtx); });
+    Free(_frameTo, [this] { av_frame_free(&_frameTo); });
+}

+ 78 - 0
AvRecorder/basic/frame.h

@@ -0,0 +1,78 @@
+#ifndef __FRAME_H__
+#define __FRAME_H__
+#include "basic/basic.h"
+
+class __BasicFrame {
+public:
+    AVFrame* frame = nullptr;
+    __BasicFrame() = default;
+    __BasicFrame(__BasicFrame&& rhs) noexcept
+    {
+        frame = rhs.frame;
+        rhs.frame = nullptr;
+    }
+    __BasicFrame& operator=(__BasicFrame&& rhs)
+    {
+        Free(frame, [this] { av_frame_free(&frame); });
+        frame = rhs.frame;
+        rhs.frame = nullptr;
+        return *this;
+    }
+    __BasicFrame(const __BasicFrame& rhs) = delete;
+    __BasicFrame& operator=(const __BasicFrame& rhs) = delete;
+    ~__BasicFrame()
+    {
+        Free(frame, [this] { av_frame_free(&frame); });
+    }
+};
+
+template <MediaType mediaType>
+class Frame;
+
+template <>
+class Frame<MediaType::AUDIO> : public __BasicFrame {
+public:
+    static AVFrame* Alloc(AVSampleFormat sampleFmt,
+        const AVChannelLayout* channel_layout,
+        int sampleRate, int nbSamples);
+
+    Frame(AVSampleFormat sampleFmt,
+        const AVChannelLayout* channel_layout, int sampleRate,
+        int nbSamples);
+
+    Frame(AVFrame* frame);
+    Frame() = default;
+};
+
+template <>
+class Frame<MediaType::VIDEO> : public __BasicFrame {
+public:
+    static AVFrame* Alloc(AVPixelFormat pixFmt, int width, int height);
+    Frame(AVPixelFormat pixFmt, int width, int height);
+    Frame(AVFrame* frame);
+    Frame() = default;
+};
+
+struct SwsContext;
+
+class FfmpegConverter {
+private:
+    AVPixelFormat _from;
+    AVPixelFormat _to;
+
+public:
+    FfmpegConverter(AVPixelFormat from, AVPixelFormat to)
+        : _from(from)
+        , _to(to)
+    {
+    }
+    bool SetSize(int width, int height);
+    AVFrame* Trans(AVFrame* frameFrom);
+    ~FfmpegConverter();
+
+private:
+    AVFrame* _frameTo = nullptr;
+    SwsContext* _swsCtx = nullptr;
+};
+
+#endif

+ 88 - 0
AvRecorder/basic/timer.h

@@ -0,0 +1,88 @@
+#ifndef __TIMER_H__
+#define __TIMER_H__
+
#include "basic/basic.h"

#include <atomic>
#include <functional>
#include <memory>
+
+class Timer
+{
+public:
+    ~Timer() { Stop(); }
+
+    // interval 为 0 表示时刻执行
+    template<typename Func>
+    void Start(int fps, Func&& func)
+    {
+        static_assert(std::is_convertible_v<Func, std::function<void()>>,
+                      "func need to be std::function<void()>");
+        _fps = fps;
+        _tickCnt = 0;
+        _isOverload = false;
+        __CheckNo(!_isRunning);
+        using namespace std::chrono;
+        _isRunning = true;
+        _beginTime = high_resolution_clock::now();
+        if (_fps > 0) {
+            auto task = [this, func = std::forward<Func>(func)]() mutable {
+                while (_isRunning) {
+                    // 这里不能直接使用整数除法
+                    // 因为整数除法有截断,导致最终睡眠的时间少一些
+                    uint64_t goalTime = int((double(1000.0) / _fps * _tickCnt) + 0.5);
+                    ++_tickCnt;
+                    auto nowTime = high_resolution_clock::now();
+                    auto duration = duration_cast<milliseconds>(nowTime - _beginTime).count();
+                    int64_t sleepTime = goalTime - duration;
+                    if (sleepTime > 0) {
+                        SleepMs(sleepTime);
+                    }
+#ifdef __AVDEBUG
+                    // else if (sleepTime < 0) {
+                    //     printf("Time out : %lld\n", -sleepTime);
+                    // }
+#endif
+                    _isOverload = -sleepTime > 1000; // 捕获的过载时间设置为 1s
+                    func();
+                }
+            };
+            _thread = new std::thread(std::move(task));
+            // timeBeginPeriod(1);
+            return;
+        }
+
+        auto task = [this, func = std::forward<Func>(func)]() mutable {
+            while (_isRunning) {
+                func();
+            }
+        };
+        _thread = new std::thread(std::move(task));
+    }
+
+    void Stop()
+    {
+        _isRunning = false;
+        if (_thread == nullptr) {
+            return;
+        }
+        // if (_fps > 0) {
+        //     timeEndPeriod(1);
+        // }
+        _thread->join();
+        delete _thread;
+
+        _thread = nullptr;
+    }
+
+    bool IsOverload() const { return _isOverload; }
+
+private:
+    int _fps = 100;
+    int _isRunning = false;
+    int _isOverload = false;
+    std::vector<int> vec;
+    std::chrono::time_point<std::chrono::high_resolution_clock> _beginTime;
+    std::thread* _thread = nullptr;
+    uint64_t _tickCnt = 0;
+};
+
+#endif

+ 9 - 0
AvRecorder/capturer/audio/audio.pri

@@ -0,0 +1,9 @@
+HEADERS += \
+    $$PWD/audio_capturer.h \
+    $$PWD/audio_qt_capturer.h \
+    $$PWD/iaudiocapturer.h
+
+SOURCES += \
+    $$PWD/audio_capturer.cpp \
+    $$PWD/audio_qt_capturer.cpp \
+    $$PWD/iaudiocapturer.cpp

+ 189 - 0
AvRecorder/capturer/audio/audio_capturer.cpp

@@ -0,0 +1,189 @@
+#include "audio_capturer.h"
+
+#include "basic/basic.h"
+
+#define DEFAULT_SAMPLE_RATE 48000        // 默认采样率:48kHz
+#define DEFAULT_BITS_PER_SAMPLE 16       // 默认位深:16bit
+#define DEFAULT_CHANNELS 1               // 默认音频通道数:1
+#define DEFAULT_AUDIO_PACKET_INTERVAL 10 // 默认音频包发送间隔:10ms
+
// Sets up WASAPI capture for the requested endpoint type and registers the
// callback that will receive raw PCM buffers. Any previous session is torn
// down first, so Init() may be called repeatedly.
bool AudioCapturer::Init(Type deviceType, CallBack callback, void* userInfo)
{
    Stop();
    _userInfo = userInfo;
    _callback = callback;
    _deviceType = deviceType;
    __CheckBool(_CreateDeviceEnumerator(&_pDeviceEnumerator));
    __CheckBool(_CreateDevice(_pDeviceEnumerator, &_pDevice));
    __CheckBool(_CreateAudioClient(_pDevice, &_pAudioClient));

    // Try the preferred default format first; otherwise fall back to the
    // audio engine's mix format.
    if (!_IsFormatSupported(_pAudioClient)) {
        __CheckBool(_GetPreferFormat(_pAudioClient, &_formatex));
    }
    __CheckBool(_InitAudioClient(_pAudioClient, &_formatex));
    __CheckBool(_CreateAudioCaptureClient(_pAudioClient, &_pAudioCaptureClient));

    // Mirror the negotiated format into the platform-neutral struct.
    _format.sampleRate = _formatex.Format.nSamplesPerSec;
    _format.channels = _formatex.Format.nChannels;
    _format.bitsPerSample = _formatex.Format.wBitsPerSample;
    _format.avgBytesPerSec = _formatex.Format.nAvgBytesPerSec;
    _format.blockAlign = _formatex.Format.nBlockAlign;

    _isInit = true;
    return true;
}
+
// Spawns the capture thread. Requires a successful Init() first.
bool AudioCapturer::Start()
{
    __CheckBool(_isInit);
    _loopFlag = true;
    // Loop a silent wav to force the render endpoint open; otherwise a
    // loopback stream can starve while nothing is playing.
    PlaySoundA("./rc/mute.wav", nullptr, SND_FILENAME | SND_ASYNC | SND_LOOP);
    _captureThread = new std::thread(
        [this] { _ThreadRun(_pAudioClient, _pAudioCaptureClient); });
    return true;
}
+
// Stops the capture thread (if any) and releases all COM objects in
// reverse order of creation. Safe to call repeatedly and before Init().
void AudioCapturer::Stop()
{
    // CoUninitialize();
    _isInit = false;
    _loopFlag = false; // signal _ThreadRun to exit before joining
    Free(_captureThread, [this] {
        _captureThread->join();
        delete _captureThread;
    });
    Free(_pAudioCaptureClient, [this] { _pAudioCaptureClient->Release(); });
    if (_pAudioClient != nullptr) {
        _pAudioClient->Stop();
    }
    // Cancel the silent keep-alive loop started in Start().
    PlaySoundA(nullptr, nullptr, SND_FILENAME | SND_ASYNC | SND_LOOP);

    Free(_pAudioClient, [this] { _pAudioClient->Release(); });
    Free(_pDevice, [this] { _pDevice->Release(); });
    Free(_pDeviceEnumerator, [this] { _pDeviceEnumerator->Release(); });
}
+
+bool AudioCapturer::_CreateDeviceEnumerator(IMMDeviceEnumerator** enumerator)
+{
+    // __CheckBool(SUCCEEDED(CoInitializeEx(nullptr, COINIT_MULTITHREADED)));
+    // __CheckBool(SUCCEEDED(CoInitializeEx(nullptr, COINIT_APARTMENTTHREADED)));
+    __CheckBool(SUCCEEDED(CoCreateInstance(__uuidof(MMDeviceEnumerator), NULL, CLSCTX_ALL,
+        __uuidof(IMMDeviceEnumerator),
+        reinterpret_cast<void**>(enumerator))));
+    return true;
+}
+bool AudioCapturer::_CreateDevice(IMMDeviceEnumerator* enumerator, IMMDevice** device)
+{
+    EDataFlow enDataFlow = _deviceType == Microphone ? eCapture : eRender;
+    ERole enRole = eConsole;
+    __CheckBool(SUCCEEDED(enumerator->GetDefaultAudioEndpoint(enDataFlow, enRole, device)));
+    return true;
+}
+bool AudioCapturer::_CreateAudioClient(IMMDevice* device, IAudioClient** audioClient)
+{
+    __CheckBool(SUCCEEDED(device->Activate(__uuidof(IAudioClient), CLSCTX_ALL, NULL,
+        (void**)audioClient)));
+    return true;
+}
+bool AudioCapturer::_IsFormatSupported(IAudioClient* audioClient)
+{
+    memset(&_formatex, 0, sizeof(_formatex));
+    WAVEFORMATEX* format = &_formatex.Format;
+    format->nSamplesPerSec = DEFAULT_SAMPLE_RATE;
+    format->wBitsPerSample = DEFAULT_BITS_PER_SAMPLE;
+    format->nChannels = DEFAULT_CHANNELS;
+
+    WAVEFORMATEX* closestMatch = nullptr;
+
+    HRESULT hr = audioClient->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED,
+        format, &closestMatch);
+    if (hr == AUDCLNT_E_UNSUPPORTED_FORMAT) // 0x88890008
+    {
+        if (closestMatch == nullptr) // 如果找不到最相近的格式,closestMatch可能为nullptr
+        {
+            return false;
+        }
+
+        format->nSamplesPerSec = closestMatch->nSamplesPerSec;
+        format->wBitsPerSample = closestMatch->wBitsPerSample;
+        format->nChannels = closestMatch->nChannels;
+
+        return true;
+    }
+
+    return false;
+}
+bool AudioCapturer::_GetPreferFormat(IAudioClient* audioClient,
+    WAVEFORMATEXTENSIBLE* formatex)
+{
+    WAVEFORMATEX* format = nullptr;
+    __CheckBool(SUCCEEDED(audioClient->GetMixFormat(&format)));
+    formatex->Format.nSamplesPerSec = format->nSamplesPerSec;
+    formatex->Format.wBitsPerSample = format->wBitsPerSample;
+    formatex->Format.nChannels = format->nChannels;
+    return true;
+}
// Initializes the client in shared mode. AUTOCONVERTPCM + SRC let the audio
// engine resample/remix to the format we filled in; loopback is enabled for
// speaker capture. Also finishes populating the WAVEFORMATEXTENSIBLE
// derived fields (block align, byte rate, channel mask, PCM subformat).
bool AudioCapturer::_InitAudioClient(IAudioClient* audioClient,
    WAVEFORMATEXTENSIBLE* formatex)
{
    AUDCLNT_SHAREMODE shareMode = AUDCLNT_SHAREMODE_SHARED; // share Audio Engine with other applications
    DWORD streamFlags = _deviceType == Microphone ? 0 : AUDCLNT_STREAMFLAGS_LOOPBACK;
    streamFlags |= AUDCLNT_STREAMFLAGS_AUTOCONVERTPCM;      // A channel matrixer and a sample
                                                            // rate converter are inserted
    streamFlags |= AUDCLNT_STREAMFLAGS_SRC_DEFAULT_QUALITY; // a sample rate converter
                                                            // with better quality than
                                                            // the default conversion but
                                                            // with a higher performance
                                                            // cost is used
    REFERENCE_TIME hnsBufferDuration = 0; // 0: let the engine choose the buffer size
    WAVEFORMATEX* format = &formatex->Format;
    format->wFormatTag = WAVE_FORMAT_EXTENSIBLE;
    format->nBlockAlign = (format->wBitsPerSample >> 3) * format->nChannels;
    format->nAvgBytesPerSec = format->nBlockAlign * format->nSamplesPerSec;
    format->cbSize = sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX);
    formatex->Samples.wValidBitsPerSample = format->wBitsPerSample;
    formatex->dwChannelMask = format->nChannels == 1 ? KSAUDIO_SPEAKER_MONO : KSAUDIO_SPEAKER_STEREO;
    formatex->SubFormat = KSDATAFORMAT_SUBTYPE_PCM;

    __CheckBool(SUCCEEDED(audioClient->Initialize(shareMode, streamFlags, hnsBufferDuration, 0,
        format, nullptr)));
    return true;
}
+
+bool AudioCapturer::_CreateAudioCaptureClient(IAudioClient* audioClient,
+    IAudioCaptureClient** audioCaptureClient)
+{
+    __CheckBool(SUCCEEDED(audioClient->GetService(IID_PPV_ARGS(audioCaptureClient))));
+    return true;
+}
+
// Capture loop: polls every 5 ms and drains all pending packets, handing
// each raw buffer to the user callback. Runs until _loopFlag is cleared.
// NOTE(review): dw_flag is never inspected for AUDCLNT_BUFFERFLAGS_SILENT,
// so silent packets are forwarded with whatever bytes WASAPI provides --
// confirm this is intended.
bool AudioCapturer::_ThreadRun(IAudioClient* audio_client,
    IAudioCaptureClient* audio_capture_client)
{
    UINT32 num_success = 0;
    BYTE* p_audio_data = nullptr;
    UINT32 num_frames_to_read = 0;
    DWORD dw_flag = 0;
    UINT32 num_frames_in_next_packet = 0;
    audio_client->Start();
    while (_loopFlag) {
        SleepMs(5);
        while (true) {
            __CheckBool(SUCCEEDED(audio_capture_client->GetNextPacketSize(&num_frames_in_next_packet)));
            if (num_frames_in_next_packet == 0) {
                break; // drained everything currently buffered
            }

            __CheckBool(SUCCEEDED(audio_capture_client->GetBuffer(&p_audio_data, &num_frames_to_read,
                &dw_flag, nullptr, nullptr)));

            // bytes = bytesPerSample * channels * frames
            size_t size = (_formatex.Format.wBitsPerSample >> 3) * _formatex.Format.nChannels * num_frames_to_read;
            _callback(p_audio_data, size, _userInfo);
            __CheckBool(SUCCEEDED(audio_capture_client->ReleaseBuffer(num_frames_to_read)));
        }
    }

    audio_client->Stop();
    return true;
}

+ 51 - 0
AvRecorder/capturer/audio/audio_capturer.h

@@ -0,0 +1,51 @@
+
+#ifndef __AUDIO_CAPTURER_H__
+#define __AUDIO_CAPTURER_H__
+
+#include "iaudiocapturer.h"
+
+#include <audioclient.h>
+#include <combaseapi.h>
+#include <mmdeviceapi.h>
+
+#include <memory>
+#include <thread>
+
// WASAPI-based capturer for the default microphone or (via loopback) the
// default speaker endpoint. PCM buffers are delivered on a dedicated
// capture thread through the callback registered in Init().
class AudioCapturer : public IAudioCapturer
{
public:
    bool Init(Type deviceType, CallBack callback, void* userInfo = nullptr) override;
    bool Start() override;
    void Stop() override;

    // Valid only after a successful Init().
    const AudioFormat& GetFormat() const override { return _format; }

private:
    mutable AudioFormat _format;      // negotiated format, platform-neutral view
    bool _isInit = false;
    CallBack _callback;               // invoked from the capture thread
    Type _deviceType;
    IMMDeviceEnumerator* _pDeviceEnumerator = nullptr;
    IMMDevice* _pDevice = nullptr;
    IAudioClient* _pAudioClient = nullptr;
    IAudioCaptureClient* _pAudioCaptureClient = nullptr;
    std::thread* _captureThread = nullptr;
    bool _loopFlag = false;           // capture-thread exit signal
    WAVEFORMATEXTENSIBLE _formatex;   // negotiated native format
    void* _userInfo = nullptr;        // opaque pointer echoed to the callback

    bool _CreateDeviceEnumerator(IMMDeviceEnumerator** enumerator);
    bool _CreateDevice(IMMDeviceEnumerator* enumerator, IMMDevice** device);
    bool _CreateAudioClient(IMMDevice* device, IAudioClient** audioClient);
    bool _IsFormatSupported(IAudioClient* audioClient);
    bool _GetPreferFormat(IAudioClient* audioClient,
        WAVEFORMATEXTENSIBLE* formatex);
    bool _InitAudioClient(IAudioClient* audioClient,
        WAVEFORMATEXTENSIBLE* formatex);
    bool _CreateAudioCaptureClient(IAudioClient* audioClient,
        IAudioCaptureClient** audioCaptureClient);
    bool _ThreadRun(IAudioClient* audio_client,
        IAudioCaptureClient* audio_capture_client);
};
+
+#endif

+ 465 - 0
AvRecorder/capturer/audio/audio_qt_capturer.cpp

@@ -0,0 +1,465 @@
+#include "audio_qt_capturer.h"
+#include "qaudio.h"
+
+#include <QAudioDeviceInfo>
+#include <QDebug>
+
+#include <qendian.h>
+
// Computes the maximum representable amplitude for `format` so that
// writeData() can normalize peak levels into [0, 1]. Formats not covered
// by the switch leave m_maxAmplitude at 0, which disables level tracking.
AudioInfo::AudioInfo(const QAudioFormat& format)
    : m_format(format)
{
    switch (m_format.sampleSize()) {
    case 8:
        switch (m_format.sampleType()) {
        case QAudioFormat::UnSignedInt:
            m_maxAmplitude = 255;
            break;
        case QAudioFormat::SignedInt:
            m_maxAmplitude = 127;
            break;
        default:
            break;
        }
        break;
    case 16:
        switch (m_format.sampleType()) {
        case QAudioFormat::UnSignedInt:
            m_maxAmplitude = 65535;
            break;
        case QAudioFormat::SignedInt:
            m_maxAmplitude = 32767;
            break;
        default:
            break;
        }
        break;

    case 32:
        switch (m_format.sampleType()) {
        case QAudioFormat::UnSignedInt:
            m_maxAmplitude = 0xffffffff;
            break;
        case QAudioFormat::SignedInt:
            m_maxAmplitude = 0x7fffffff;
            break;
        case QAudioFormat::Float:
            m_maxAmplitude = 0x7fffffff; // Kind of
            // deliberate fall-through: the default case only breaks
        default:
            break;
        }
        break;

    default:
        break;
    }
}
+
// Opens the device for writing; QAudioInput then pushes captured data in
// through writeData().
void AudioInfo::start()
{
    open(QIODevice::WriteOnly);
}

void AudioInfo::stop()
{
    close();
}

// Write-only sink: reading is not supported and always yields 0 bytes.
qint64 AudioInfo::readData(char* data, qint64 maxlen)
{
    Q_UNUSED(data)
    Q_UNUSED(maxlen)

    return 0;
}
+
// Scans an incoming PCM block, decoding each sample according to the
// configured size/type/byte order, tracks the peak magnitude, normalizes
// it against m_maxAmplitude into m_level, and emits update(). Always
// reports the full `len` as consumed.
qint64 AudioInfo::writeData(const char* data, qint64 len)
{
    if (m_maxAmplitude) {
        Q_ASSERT(m_format.sampleSize() % 8 == 0);
        const int channelBytes = m_format.sampleSize() / 8;
        const int sampleBytes = m_format.channelCount() * channelBytes;
        Q_ASSERT(len % sampleBytes == 0);
        const int numSamples = len / sampleBytes;

        quint32 maxValue = 0;
        const unsigned char* ptr = reinterpret_cast<const unsigned char*>(data);

        for (int i = 0; i < numSamples; ++i) {
            for (int j = 0; j < m_format.channelCount(); ++j) {
                quint32 value = 0;

                // Decode one channel sample into an absolute magnitude.
                if (m_format.sampleSize() == 8
                    && m_format.sampleType() == QAudioFormat::UnSignedInt) {
                    value = *reinterpret_cast<const quint8*>(ptr);
                } else if (m_format.sampleSize() == 8
                           && m_format.sampleType() == QAudioFormat::SignedInt) {
                    value = qAbs(*reinterpret_cast<const qint8*>(ptr));
                } else if (m_format.sampleSize() == 16
                           && m_format.sampleType() == QAudioFormat::UnSignedInt) {
                    if (m_format.byteOrder() == QAudioFormat::LittleEndian)
                        value = qFromLittleEndian<quint16>(ptr);
                    else
                        value = qFromBigEndian<quint16>(ptr);
                } else if (m_format.sampleSize() == 16
                           && m_format.sampleType() == QAudioFormat::SignedInt) {
                    if (m_format.byteOrder() == QAudioFormat::LittleEndian)
                        value = qAbs(qFromLittleEndian<qint16>(ptr));
                    else
                        value = qAbs(qFromBigEndian<qint16>(ptr));
                } else if (m_format.sampleSize() == 32
                           && m_format.sampleType() == QAudioFormat::UnSignedInt) {
                    if (m_format.byteOrder() == QAudioFormat::LittleEndian)
                        value = qFromLittleEndian<quint32>(ptr);
                    else
                        value = qFromBigEndian<quint32>(ptr);
                } else if (m_format.sampleSize() == 32
                           && m_format.sampleType() == QAudioFormat::SignedInt) {
                    if (m_format.byteOrder() == QAudioFormat::LittleEndian)
                        value = qAbs(qFromLittleEndian<qint32>(ptr));
                    else
                        value = qAbs(qFromBigEndian<qint32>(ptr));
                } else if (m_format.sampleSize() == 32
                           && m_format.sampleType() == QAudioFormat::Float) {
                    value = qAbs(*reinterpret_cast<const float*>(ptr) * 0x7fffffff); // assumes 0-1.0
                }

                maxValue = qMax(value, maxValue);
                ptr += channelBytes;
            }
        }

        maxValue = qMin(maxValue, m_maxAmplitude);
        m_level = qreal(maxValue) / m_maxAmplitude;
    }

    emit update();
    return len;
}
+
// Constructs an idle capturer; call Init() then Start() to begin capture.
QtAudioCapturer::QtAudioCapturer(QObject* parent)
    : QObject(parent)
    , m_deviceType(Microphone)
    , m_callback(nullptr)
    , m_userInfo(nullptr)
    , m_isRunning(false)
    , m_audioDevice(nullptr)
{
    // Open the scratch buffer used by processAudioData().
    m_buffer.open(QIODevice::ReadWrite);

    // Periodic drain of m_buffer (currently mostly idle: data is delivered
    // directly from handleReadyRead()).
    connect(&m_processTimer, &QTimer::timeout, this, &QtAudioCapturer::processAudioData);
}
+
// Ensure capture resources are released on destruction.
QtAudioCapturer::~QtAudioCapturer()
{
    Stop();
}
+
+bool QtAudioCapturer::Init(Type deviceType, CallBack callback, void* userInfo)
+{
+    m_deviceType = deviceType;
+    m_callback = callback;
+    m_userInfo = userInfo;
+
+    setupAudioFormat();
+
+    if (m_deviceType == Microphone) {
+        return initMicrophone();
+    } else {
+        return initSpeaker();
+    }
+}
+
+bool QtAudioCapturer::Start()
+{
+    if (m_isRunning) {
+        qDebug() << "QtAudioCapturer::Start - 已经在运行中";
+        return true;
+    }
+
+    if (!m_audioInput && m_deviceType == Microphone) {
+        qWarning() << "QtAudioCapturer::Start - 音频输入未初始化";
+        return false;
+    }
+
+    qDebug() << "QtAudioCapturer::Start - 开始捕获音频,设备类型:"
+             << (m_deviceType == Microphone ? "麦克风" : "扬声器");
+
+    if (m_deviceType == Microphone && m_audioInput) {
+        // 设置更合理的缓冲区大小
+        // int bufferSize = 8192;
+        // 或者使用计算值:m_qtAudioFormat.sampleRate() * m_qtAudioFormat.channelCount() * 2 / 5;
+        // m_audioInput->setBufferSize(bufferSize);
+        // qDebug() << "QtAudioCapturer::Start - 设置缓冲区大小:" << bufferSize;
+
+        // qDebug() << "QtAudioCapturer::Start - 缓冲区大小:" << m_audioInput->bufferSize();
+
+        // 设置更短的通知间隔,提高响应性
+        // m_audioInput->setNotifyInterval(20); // 20毫秒
+
+        // 启动音频输入
+        m_audioDevice = m_audioInput->start();
+        if (!m_audioDevice) {
+            qWarning() << "QtAudioCapturer::Start - 启动音频输入失败";
+            return false;
+        }
+
+        // qDebug() << "QtAudioCapturer::Start - 麦克风启动成功,缓冲区大小:"
+        //          << m_audioInput->bufferSize();
+
+        // 连接信号
+        connect(m_audioDevice, &QIODevice::readyRead, this, &QtAudioCapturer::handleReadyRead);
+
+        // 添加一个定时器,定期检查音频状态
+        QTimer* statusTimer = new QTimer(this);
+        connect(statusTimer, &QTimer::timeout, [this]() {
+            // qDebug() << "QtAudioCapturer::StatusCheck - 音频输入状态:" << m_audioInput->state()
+            //          << "错误:" << m_audioInput->error()
+            //          << "处理字节数:" << m_audioInput->processedUSecs()
+            //          << "可用字节数:" << (m_audioDevice ? m_audioDevice->bytesAvailable() : 0);
+
+            // 如果状态不是活动状态,尝试重新启动
+            if (m_audioInput->state() != QAudio::ActiveState) {
+                qDebug() << "QtAudioCapturer::StatusCheck - 尝试重新启动音频输入";
+                m_audioInput->stop();
+                m_audioDevice = m_audioInput->start();
+                if (m_audioDevice) {
+                    connect(m_audioDevice,
+                            &QIODevice::readyRead,
+                            this,
+                            &QtAudioCapturer::handleReadyRead);
+                }
+            }
+        });
+        statusTimer->start(2000); // 每2秒检查一次
+    } else if (m_deviceType == Speaker) {
+        // 系统声音捕获的实现
+        qDebug() << "QtAudioCapturer::Start - 尝试启动扬声器捕获";
+    }
+
+    m_isRunning = true;
+    return true;
+}
+
// Stops capture: disconnects the device, stops the input, and resets the
// scratch buffer. Idempotent -- returns immediately when not running.
void QtAudioCapturer::Stop()
{
    if (!m_isRunning) {
        return;
    }

    m_processTimer.stop();

    if (m_audioDevice) {
        disconnect(m_audioDevice, &QIODevice::readyRead, this, &QtAudioCapturer::handleReadyRead);
        m_audioDevice = nullptr; // owned by QAudioInput, not by us
    }

    if (m_audioInput) {
        m_audioInput->stop();
    }

    m_buffer.buffer().clear();
    m_buffer.seek(0);
    m_isRunning = false;
}
+
// Returns the format chosen by setupAudioFormat()/initMicrophone().
const AudioFormat& QtAudioCapturer::GetFormat() const
{
    return m_audioFormat;
}
+
// readyRead slot: drains all bytes currently available on the input device
// and forwards them straight to the registered callback.
void QtAudioCapturer::handleReadyRead()
{
    if (m_audioDevice) {
        QByteArray data = m_audioDevice->readAll();

        if (data.size() > 0) {
            // Deliver the raw PCM directly; no intermediate buffering.
            if (m_callback) {
                m_callback(data.data(), data.size(), m_userInfo);
            } else {
                qWarning() << "QtAudioCapturer::handleReadyRead - 回调函数为空";
            }
        }

        // m_buffer is intentionally not written: data was already handled.
    } else {
        // No device available -- nothing to read.
    }
}
+
// Timer slot that would flush m_buffer to the callback. Currently it only
// clears the buffer: delivery happens directly in handleReadyRead().
void QtAudioCapturer::processAudioData()
{
    QByteArray data = m_buffer.buffer();
    if (data.size() > 0 && m_callback) {
        qDebug() << "QtAudioCapturer::processAudioData - 处理缓冲区数据,大小:" << data.size()
                 << "字节";

        // Intentionally not invoking m_callback here; handleReadyRead()
        // already delivered this data.

        m_buffer.buffer().clear();
        m_buffer.seek(0);
    }
}
+
+void QtAudioCapturer::onAudioNotify()
+{
+    if (m_audioDevice && m_audioDevice->bytesAvailable() > 0) {
+        handleReadyRead();
+    } else {
+        // 尝试直接从 QAudioInput 读取数据
+        QByteArray buffer(m_audioInput->bufferSize() / 4, 0);
+        qint64 len = buffer.size();
+
+        if (len > 0) {
+            qDebug() << "QtAudioCapturer::onAudioNotify - 直接读取到数据,大小:" << len << "字节";
+            buffer.resize(len);
+
+            // 调用回调函数
+            if (m_callback) {
+                m_callback(buffer.data(), buffer.size(), m_userInfo);
+            }
+        }
+    }
+}
+
// Picks the default input device, negotiates a supported format (probing a
// matrix of common rates/channels/sizes before falling back to
// nearestFormat), then creates the QAudioInput.
bool QtAudioCapturer::initMicrophone()
{
    // Default input device.
    QAudioDeviceInfo deviceInfo = QAudioDeviceInfo::defaultInputDevice();

    qDebug() << "QtAudioCapturer::initMicrophone - 默认输入设备:" << deviceInfo.deviceName();

    // Bail out when no input device exists at all.
    if (deviceInfo.isNull()) {
        qWarning() << "QtAudioCapturer::initMicrophone - 没有可用的音频输入设备";
        return false;
    }

    // Log every available input device (debug aid).
    qDebug() << "QtAudioCapturer::initMicrophone - 可用的音频输入设备:";
    for (const auto& device : QAudioDeviceInfo::availableDevices(QAudio::AudioInput)) {
        qDebug() << "  " << device.deviceName();
    }

    // If the requested format is unsupported, probe common combinations.
    if (!deviceInfo.isFormatSupported(m_qtAudioFormat)) {
        qWarning() << "QtAudioCapturer::initMicrophone - 默认格式不支持,尝试使用最接近的格式";

        // Candidate parameters, tried most-preferred first.
        QList<int> sampleRates = {44100, 48000, 16000, 8000};
        QList<int> channelCounts = {2, 1};
        QList<int> sampleSizes = {16, 8};

        bool formatFound = false;

        for (int rate : sampleRates) {
            for (int channels : channelCounts) {
                for (int size : sampleSizes) {
                    QAudioFormat format;
                    format.setSampleRate(rate);
                    format.setChannelCount(channels);
                    format.setSampleSize(size);
                    format.setCodec("audio/pcm");
                    format.setByteOrder(QAudioFormat::LittleEndian);
                    format.setSampleType(QAudioFormat::SignedInt);

                    if (deviceInfo.isFormatSupported(format)) {
                        m_qtAudioFormat = format;
                        formatFound = true;
                        qDebug() << "QtAudioCapturer::initMicrophone - 找到支持的格式:"
                                 << "采样率=" << rate << "通道数=" << channels
                                 << "采样大小=" << size;
                        break;
                    }
                }
                if (formatFound)
                    break;
            }
            if (formatFound)
                break;
        }

        if (!formatFound) {
            // Last resort: let the device pick its nearest supported format.
            m_qtAudioFormat = deviceInfo.nearestFormat(m_qtAudioFormat);
            qDebug() << "QtAudioCapturer::initMicrophone - 使用最接近的格式:"
                     << "采样率=" << m_qtAudioFormat.sampleRate()
                     << "通道数=" << m_qtAudioFormat.channelCount()
                     << "采样大小=" << m_qtAudioFormat.sampleSize();
        }

        // Mirror the renegotiated format into the platform-neutral struct.
        m_audioFormat = AudioFormat(m_qtAudioFormat.sampleRate(),
                                    m_qtAudioFormat.channelCount(),
                                    m_qtAudioFormat.sampleSize());
    }

    // Create the audio input (started later in Start()).
    m_audioInput = std::make_unique<QAudioInput>(deviceInfo, m_qtAudioFormat, this);

    m_audioInfo.reset(new AudioInfo(m_qtAudioFormat));
    m_audioInfo->start();
    qDebug() << "QtAudioCapturer::initMicrophone - 音频输入创建成功,格式:"
             << "采样率=" << m_qtAudioFormat.sampleRate()
             << "通道数=" << m_qtAudioFormat.channelCount()
             << "采样大小=" << m_qtAudioFormat.sampleSize();

    return true;
}
+
// System-sound (loopback) capture is not implemented in the Qt backend;
// use the WASAPI-based AudioCapturer for speaker capture instead.
bool QtAudioCapturer::initSpeaker()
{
    return false;
}
+
// Chooses the requested capture format (44.1 kHz / mono / 16-bit signed
// little-endian PCM) and mirrors it into the platform-neutral AudioFormat.
void QtAudioCapturer::setupAudioFormat()
{
    // NOTE(review): 44100 does NOT match DEFAULT_SAMPLE_RATE (48000) used by
    // the WASAPI capturer -- the original comment wrongly claimed they agree.
    m_qtAudioFormat.setSampleRate(44100);
    m_qtAudioFormat.setChannelCount(1);   // mono for wider device compatibility
    m_qtAudioFormat.setSampleSize(16);    // 16-bit samples
    m_qtAudioFormat.setCodec("audio/pcm");
    m_qtAudioFormat.setByteOrder(QAudioFormat::LittleEndian);
    m_qtAudioFormat.setSampleType(QAudioFormat::SignedInt);

    // Mirror into the IAudioCapturer-facing format struct.
    m_audioFormat = AudioFormat(m_qtAudioFormat.sampleRate(),
                                m_qtAudioFormat.channelCount(),
                                m_qtAudioFormat.sampleSize());
}
+
+float QtAudioCapturer::calculateVolume(const QByteArray& data)
+{
+    // 计算音频数据的音量级别
+    if (data.isEmpty()) {
+        return 0.0f;
+    }
+
+    // 假设数据是16位有符号整数
+    const int16_t* samples = reinterpret_cast<const int16_t*>(data.constData());
+    int sampleCount = data.size() / sizeof(int16_t);
+
+    // 计算平均绝对值
+    float sum = 0.0f;
+    for (int i = 0; i < sampleCount; ++i) {
+        sum += std::abs(samples[i]);
+    }
+
+    // 归一化到0-1范围
+    float avgAmp = sum / (sampleCount * 32768.0f);
+    return avgAmp;
+}

+ 79 - 0
AvRecorder/capturer/audio/audio_qt_capturer.h

@@ -0,0 +1,79 @@
+#ifndef AUDIO_QT_CAPTURER_H
+#define AUDIO_QT_CAPTURER_H
+
+#include <QAudioInput>
+#include <QAudioOutput>
+#include <QBuffer>
+#include <QObject>
+#include <QTimer>
+#include <memory>
+
+#include "iaudiocapturer.h"
+
+class AudioInfo : public QIODevice
+{
+    Q_OBJECT
+public:
+    AudioInfo(const QAudioFormat& format);
+
+    void start();
+    void stop();
+
+    qreal level() const { return m_level; }
+
+    qint64 readData(char* data, qint64 maxlen) override;
+    qint64 writeData(const char* data, qint64 len) override;
+
+private:
+    const QAudioFormat m_format;
+    quint32 m_maxAmplitude = 0;
+    qreal m_level = 0.0; // 0.0 <= m_level <= 1.0
+
+signals:
+    void update();
+};
+
+class QtAudioCapturer : public QObject, public IAudioCapturer
+{
+    Q_OBJECT
+public:
+    explicit QtAudioCapturer(QObject* parent = nullptr);
+    ~QtAudioCapturer() override;
+
+    // IAudioCapturer接口实现
+    bool Init(Type deviceType, CallBack callback, void* userInfo = nullptr) override;
+    bool Start() override;
+    void Stop() override;
+    const AudioFormat& GetFormat() const override;
+
+private slots:
+    void handleReadyRead();
+    void processAudioData();
+    void onAudioNotify();
+
+private:
+    bool initMicrophone();
+    bool initSpeaker();
+    void setupAudioFormat();
+    float calculateVolume(const QByteArray& data);
+
+private:
+    Type m_deviceType;
+    CallBack m_callback;
+    void* m_userInfo;
+    AudioFormat m_audioFormat;
+    bool m_isRunning;
+
+    // Qt音频相关
+    QAudioFormat m_qtAudioFormat;
+    QScopedPointer<AudioInfo> m_audioInfo;
+
+    std::unique_ptr<QAudioInput> m_audioInput;
+    QIODevice* m_audioDevice;
+    QBuffer m_buffer;
+    QTimer m_processTimer;
+
+    static constexpr int PROCESS_INTERVAL_MS = 20; // 处理间隔,20ms
+};
+
+#endif // AUDIO_QT_CAPTURER_H

+ 1 - 0
AvRecorder/capturer/audio/iaudiocapturer.cpp

@@ -0,0 +1 @@
+#include "iaudiocapturer.h"

+ 61 - 0
AvRecorder/capturer/audio/iaudiocapturer.h

@@ -0,0 +1,61 @@
+#ifndef IAUDIOCAPTURER_H
+#define IAUDIOCAPTURER_H
+
+#include <cstdint>
+#include <functional>
+
+// 平台无关的音频格式结构
// Platform-independent description of a PCM audio stream.
struct AudioFormat
{
    uint32_t sampleRate;     // sample rate in Hz
    uint16_t channels;       // number of interleaved channels
    uint16_t bitsPerSample;  // bit depth of one sample
    uint32_t avgBytesPerSec; // bytes consumed per second of audio
    uint16_t blockAlign;     // size in bytes of one frame (all channels)

    // Zero-initialized format, meaning "not configured yet".
    AudioFormat()
        : sampleRate(0)
        , channels(0)
        , bitsPerSample(0)
        , avgBytesPerSec(0)
        , blockAlign(0)
    {}

    // Builds a format from the three basic parameters; blockAlign and
    // avgBytesPerSec are derived (bits are assumed byte-aligned).
    AudioFormat(uint32_t rate, uint16_t ch, uint16_t bits)
        : sampleRate(rate)
        , channels(ch)
        , bitsPerSample(bits)
    {
        blockAlign = static_cast<uint16_t>(ch * bits / 8);
        avgBytesPerSec = rate * blockAlign;
    }
};
+
// Abstract audio-capture interface implemented by the WASAPI backend
// (AudioCapturer) and the Qt backend (QtAudioCapturer).
class IAudioCapturer
{
public:
    enum Type { Microphone, Speaker };

    // data/size: raw PCM buffer; userInfo: opaque pointer given to Init().
    using CallBack = std::function<void(void* data, size_t size, void* userInfo)>;

    virtual ~IAudioCapturer() = default;

    virtual bool Init(Type deviceType, CallBack callback, void* userInfo = nullptr) = 0;
    virtual bool Start() = 0;
    virtual void Stop() = 0;

    // Format of the buffers passed to the callback.
    virtual const AudioFormat& GetFormat() const = 0;
};
+
+#endif // IAUDIOCAPTURER_H

+ 48 - 0
AvRecorder/capturer/base_capturer.h

@@ -0,0 +1,48 @@
+#ifndef BASE_CAPTURER_H
+#define BASE_CAPTURER_H
+
+#include <Windows.h>
+extern "C" {
+#include <libavutil/frame.h>
+}
+
// Abstract frame source (screen or window). Implementations produce
// AVFrames for downstream encoding.
class BaseCapturer
{
public:
    virtual ~BaseCapturer() = default;

    // Core operations.
    virtual bool Open(int width, int height) = 0;
    virtual void Close() = 0;
    // Returns the next captured frame; ownership semantics depend on the
    // concrete capturer (DXGI/GDI/WGC) -- TODO confirm before freeing.
    virtual AVFrame* GetFrame() = 0;

    // Capture dimensions.
    virtual int GetWidth() const = 0;
    virtual int GetHeight() const = 0;

    // Optional capabilities; defaults are no-ops for capturers without them.
    virtual void SetDrawCursor(bool isDrawCursor) {}
    virtual HDC GetHdc() { return nullptr; }
    virtual HDC GetHdc(int borderWidth, int borderHeight) { return nullptr; }
};
+
+// 窗口捕获器基类
// Base for capturers bound to a specific window. The generic Open() is
// intentionally disabled; callers must use OpenWindow() with an HWND.
class WindowCapturer : public BaseCapturer
{
public:
    virtual bool OpenWindow(HWND hwnd, int width, int height) = 0;

    // Always fails: window capturers need an HWND, see OpenWindow().
    bool Open(int width, int height) override { return false; }
};
+
+// 屏幕捕获器基类
// Base for capturers bound to a monitor region. The generic Open() is
// intentionally disabled; callers must use OpenMonitor().
class MonitorCapturer : public BaseCapturer
{
public:
    virtual bool OpenMonitor(int monitorIdx, int left, int top, int width, int height) = 0;

    // Always fails: monitor capturers need a monitor index and region.
    bool Open(int width, int height) override { return false; }
};
+#endif // BASE_CAPTURER_H

+ 9 - 0
AvRecorder/capturer/capturer.pri

@@ -0,0 +1,9 @@
+
+include($$PWD/video/video.pri)
+include($$PWD/audio/audio.pri)
+
+HEADERS += \
+    $$PWD/finder.h
+
+SOURCES += \
+    $$PWD/finder.cpp

+ 98 - 0
AvRecorder/capturer/finder.cpp

@@ -0,0 +1,98 @@
+#include "finder.h"
+
+#include <Windows.h>
+#include <array>
+
+// Return the cached list of capturable top-level windows, re-enumerating
+// the desktop first when isUpdate is true.
+const std::vector<WindowFinder::Info>& WindowFinder::GetList(bool isUpdate)
+{
+    if (isUpdate) {
+        _list.clear();
+        EnumWindows(_EnumWindowsProc, (LPARAM) nullptr);
+    }
+    return _list;
+}
+
+std::vector<WindowFinder::Info> WindowFinder::_list;
+
+// Read a window's title into a std::wstring (truncated to 1023 characters).
+std::wstring WindowFinder::_GetWindowTextStd(HWND hwnd)
+{
+    WCHAR buffer[1024];
+    ::GetWindowTextW(hwnd, buffer, 1024);
+    return std::wstring(buffer);
+}
+// EnumWindows callback: append every Alt-Tab-able window to _list.
+BOOL CALLBACK WindowFinder::_EnumWindowsProc(HWND hwnd, LPARAM lParam)
+{
+    auto title = _GetWindowTextStd(hwnd);
+    if (!IsAltTabWindow(hwnd, title)) {
+        return TRUE; // keep enumerating, just skip this window
+    }
+    _list.push_back({hwnd, std::move(title)});
+    return TRUE;
+}
+
+// Heuristic filter: keep only windows a user could Alt-Tab to.
+bool WindowFinder::IsAltTabWindow(HWND hwnd, const std::wstring& title)
+{
+    // The desktop shell window is never a capture target.
+    if (hwnd == GetShellWindow()) {
+        return false;
+    }
+
+    // Skip untitled windows and the NVIDIA overlay pseudo-window.
+    if (title.empty() || title == L"NVIDIA GeForce Overlay") {
+        return false;
+    }
+
+    if (!IsWindowVisible(hwnd)) {
+        return false;
+    }
+
+    // Only top-level root windows qualify.
+    if (GetAncestor(hwnd, GA_ROOT) != hwnd) {
+        return false;
+    }
+
+    // Disabled windows cannot receive input, so they are excluded.
+    if (GetWindowLong(hwnd, GWL_STYLE) & WS_DISABLED) {
+        return false;
+    }
+
+    // UWP windows may exist while "cloaked" (hidden by the shell).
+    DWORD cloaked = FALSE;
+    HRESULT hrTemp = DwmGetWindowAttribute(hwnd, DWMWA_CLOAKED, &cloaked, sizeof(cloaked));
+    if (SUCCEEDED(hrTemp) && cloaked == DWM_CLOAKED_SHELL) {
+        return false;
+    }
+
+    // Finally, minimized windows are excluded as well.
+    return !IsIconic(hwnd);
+}
+
+// Return the cached monitor list, re-enumerating displays first when
+// isUpdate is true.
+const std::vector<MonitorFinder::Info>& MonitorFinder::GetList(bool isUpdate)
+{
+    if (isUpdate) {
+        _list.clear();
+        EnumDisplayMonitors(nullptr, nullptr, _MonitorEnumProc, (LPARAM) nullptr);
+    }
+    return _list;
+}
+
+std::vector<MonitorFinder::Info> MonitorFinder::_list;
+
+// EnumDisplayMonitors callback: record each monitor's handle, desktop rect
+// and a generated display name. Always returns TRUE to keep enumerating.
+BOOL CALLBACK MonitorFinder::_MonitorEnumProc(
+    HMONITOR hMonitor,  // handle to display monitor
+    HDC hdcMonitor,     // handle to monitor-appropriate device context
+    LPRECT lprcMonitor, // pointer to monitor intersection rectangle
+    LPARAM dwData       // data passed from EnumDisplayMonitors
+)
+{
+    MONITORINFO monitorInfo;
+    monitorInfo.cbSize = sizeof(monitorInfo);
+    // Skip monitors whose info cannot be queried instead of recording an
+    // uninitialized rect (the original ignored this failure).
+    if (!GetMonitorInfoW(hMonitor, &monitorInfo)) {
+        return TRUE;
+    }
+    Info info;
+    info.monitor = hMonitor;
+    info.rect = monitorInfo.rcMonitor;
+    info.title = L"显示器" + std::to_wstring(_list.size() + 1);
+    _list.push_back(std::move(info));
+    return TRUE;
+}

+ 43 - 0
AvRecorder/capturer/finder.h

@@ -0,0 +1,43 @@
+#pragma once
+
+#include <dwmapi.h>
+#include <string>
+#include <vector>
+#include <d3d11.h>
+
+class WindowFinder {
+public:
+    struct Info {
+        HWND hwnd = nullptr;
+        std::wstring title;
+    };
+
+    static const std::vector<Info>& GetList(bool isUpdate = false);
+
+private:
+    static std::vector<Info> _list;
+    static std::wstring _GetWindowTextStd(HWND hwnd);
+    static BOOL CALLBACK _EnumWindowsProc(HWND hwnd, LPARAM lParam);
+    static bool IsAltTabWindow(HWND hwnd, const std::wstring& title);
+};
+
+class MonitorFinder {
+public:
+    struct Info {
+        HMONITOR monitor = nullptr;
+        std::wstring title;
+        RECT rect;
+    };
+
+    static const std::vector<Info>& GetList(bool isUpdate = false);
+
+private:
+    static std::vector<Info> _list;
+
+    static BOOL CALLBACK _MonitorEnumProc(
+        HMONITOR hMonitor,  // handle to display monitor
+        HDC hdcMonitor,     // handle to monitor-appropriate device context
+        LPRECT lprcMonitor, // pointer to monitor intersection rectangle
+        LPARAM dwData       // data passed from EnumDisplayMonitors
+    );
+};

+ 0 - 0
AvRecorder/capturer/ivideocapturer.cpp


+ 276 - 0
AvRecorder/capturer/video/dxgi_capturer.cpp

@@ -0,0 +1,276 @@
+#include "dxgi_capturer.h"
+#include <windows.h>
+
+#include "basic/basic.h"
+#include "basic/frame.h"
+#include "d3d/buffer_filler.h"
+#include "d3d/convert.h"
+#include "d3d/gen_frame.h"
+#include <d3d11.h>
+#include <dxgi1_2.h>
+
+// Private state for DxgiCapturer (pimpl).
+class DxgiCapturerPrivate
+{
+public:
+    DxgiCapturerPrivate() {}
+
+    bool _bInit = false;            // Open() completed successfully
+    bool _isCaptureSuccess = false; // last GetHdc() acquired a frame
+
+    ID3D11Device* _hDevice = nullptr;
+    ID3D11DeviceContext* _hContext = nullptr;
+    IDXGIOutputDuplication* _hDeskDupl = nullptr; // desktop duplication session
+    IDXGISurface1* _hStagingSurf = nullptr;       // GDI-compatible view of _gdiImage
+    ID3D11Texture2D* _gdiImage = nullptr;         // copy of the captured desktop image
+    D3D11_TEXTURE2D_DESC _desc;                   // description of the last captured texture
+    bool _isAttached = false;
+    AVFrame* _xrgbFrame = nullptr; // fallback BGR0 output frame
+    AVFrame* _nv12Frame = nullptr; // preferred NV12 output frame
+    BufferFiller _xrgbBuffers;
+    BufferFiller _nv12Buffers;
+    D3dConverter _rgbToNv12;       // GPU RGB -> NV12 conversion
+};
+
+DxgiCapturer::DxgiCapturer()
+    : d(new DxgiCapturerPrivate)
+{
+    // _desc is read before the first capture; start from a known state.
+    ZeroMemory(&d->_desc, sizeof(d->_desc));
+}
+
+DxgiCapturer::~DxgiCapturer()
+{
+    Close();
+    delete d;
+}
+
+// Initialize D3D11 + DXGI desktop duplication for the monitor whose desktop
+// coordinates start at (left, top), and allocate the NV12/BGR0 output
+// frames of the requested size. Returns false on any failure.
+bool DxgiCapturer::Open(int left, int top, int width, int height)
+{
+    Close();
+    HRESULT hr = S_OK;
+    d->_isAttached = false;
+
+    if (d->_bInit) {
+        return false;
+    }
+
+    // Driver types supported, in preference order
+    D3D_DRIVER_TYPE DriverTypes[] = {
+        D3D_DRIVER_TYPE_HARDWARE,
+        D3D_DRIVER_TYPE_WARP,
+        D3D_DRIVER_TYPE_REFERENCE,
+    };
+    UINT NumDriverTypes = ARRAYSIZE(DriverTypes);
+
+    // Feature levels supported
+    D3D_FEATURE_LEVEL FeatureLevels[] = {
+        D3D_FEATURE_LEVEL_11_0,
+        D3D_FEATURE_LEVEL_10_1,
+        D3D_FEATURE_LEVEL_10_0,
+        D3D_FEATURE_LEVEL_9_1};
+    UINT NumFeatureLevels = ARRAYSIZE(FeatureLevels);
+
+    D3D_FEATURE_LEVEL FeatureLevel;
+
+    // Create D3D device: try each driver type until one succeeds
+    for (UINT DriverTypeIndex = 0; DriverTypeIndex < NumDriverTypes; ++DriverTypeIndex) {
+        hr = D3D11CreateDevice(nullptr,
+                               DriverTypes[DriverTypeIndex],
+                               nullptr,
+                               0,
+                               FeatureLevels,
+                               NumFeatureLevels,
+                               D3D11_SDK_VERSION,
+                               &d->_hDevice,
+                               &FeatureLevel,
+                               &d->_hContext);
+        if (SUCCEEDED(hr)) {
+            break;
+        }
+    }
+    __CheckBool(SUCCEEDED(hr));
+
+    // Get DXGI device
+    IDXGIDevice* hDxgiDevice = nullptr;
+    __CheckBool(SUCCEEDED(d->_hDevice->QueryInterface(__uuidof(IDXGIDevice),
+                                                      reinterpret_cast<void**>(&hDxgiDevice))));
+
+    // Get DXGI adapter
+    IDXGIAdapter* hDxgiAdapter = nullptr;
+    hr = hDxgiDevice->GetParent(__uuidof(IDXGIAdapter), reinterpret_cast<void**>(&hDxgiAdapter));
+    Free(hDxgiDevice, [=] { hDxgiDevice->Release(); });
+    __CheckBool(SUCCEEDED(hr));
+
+    // Find the output whose desktop rect starts at (left, top)
+    INT nOutput = 0;
+    IDXGIOutput* hDxgiOutput = nullptr;
+    DXGI_OUTPUT_DESC dxgiOutDesc;
+    ZeroMemory(&dxgiOutDesc, sizeof(dxgiOutDesc));
+
+    for (int idx = 0; SUCCEEDED(hr = hDxgiAdapter->EnumOutputs(idx, &hDxgiOutput)); ++idx) {
+        // get output description struct
+        hDxgiOutput->GetDesc(&dxgiOutDesc);
+        if (dxgiOutDesc.DesktopCoordinates.left == left
+            && dxgiOutDesc.DesktopCoordinates.top == top) { // found the monitor
+            break;
+        }
+        // Release non-matching outputs: the original leaked one IDXGIOutput
+        // reference per monitor that was skipped here.
+        hDxgiOutput->Release();
+        hDxgiOutput = nullptr;
+    }
+    Free(hDxgiAdapter, [=] { hDxgiAdapter->Release(); });
+    __CheckBool(SUCCEEDED(hr));
+
+    // QI for Output 1 (required for DuplicateOutput)
+    IDXGIOutput1* hDxgiOutput1 = nullptr;
+    hr = hDxgiOutput->QueryInterface(__uuidof(hDxgiOutput1), reinterpret_cast<void**>(&hDxgiOutput1));
+    Free(hDxgiOutput, [=] { hDxgiOutput->Release(); });
+    __CheckBool(SUCCEEDED(hr));
+
+    // Create desktop duplication
+    hr = hDxgiOutput1->DuplicateOutput(d->_hDevice, &d->_hDeskDupl);
+    Free(hDxgiOutput1, [=] { hDxgiOutput1->Release(); });
+    __CheckBool(SUCCEEDED(hr));
+
+    // Configure the RGB -> NV12 color-space conversion
+    D3D11_VIDEO_PROCESSOR_COLOR_SPACE inputColorSpace;
+    inputColorSpace.Usage = 1;
+    inputColorSpace.RGB_Range = 0;
+    inputColorSpace.YCbCr_Matrix = 1;
+    inputColorSpace.YCbCr_xvYCC = 0;
+    inputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_0_255;
+
+    D3D11_VIDEO_PROCESSOR_COLOR_SPACE outputColorSpace;
+    outputColorSpace.Usage = 0;
+    outputColorSpace.RGB_Range = 0;
+    outputColorSpace.YCbCr_Matrix = 1;
+    outputColorSpace.YCbCr_xvYCC = 0;
+    outputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_16_235;
+    d->_rgbToNv12.Open(d->_hDevice, d->_hContext, inputColorSpace, outputColorSpace);
+    d->_nv12Frame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12, width, height);
+    d->_xrgbFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_BGR0, width, height);
+    __CheckBool(d->_nv12Frame);
+    __CheckBool(d->_xrgbFrame);
+    // Initialization succeeded
+    d->_bInit = true;
+    return true;
+}
+// Release every D3D/DXGI object and frame created by Open().
+// Safe to call when not initialized (and called from the destructor).
+void DxgiCapturer::Close()
+{
+    if (!d->_bInit) {
+        return;
+    }
+
+    d->_bInit = false;
+    d->_nv12Buffers.Clear();
+    d->_xrgbBuffers.Clear();
+    d->_rgbToNv12.Close();
+    Free(d->_nv12Frame, [this] { av_frame_free(&d->_nv12Frame); });
+    Free(d->_xrgbFrame, [this] { av_frame_free(&d->_xrgbFrame); });
+    Free(d->_hDeskDupl, [this] { d->_hDeskDupl->Release(); });
+    Free(d->_hDevice, [this] { d->_hDevice->Release(); });
+    Free(d->_hContext, [this] { d->_hContext->Release(); });
+}
+
+// Acquire the next duplicated desktop frame, copy it into a GDI-compatible
+// texture (_gdiImage) and return an HDC wrapping it so the caller can draw
+// the cursor with GDI. Returns nullptr when no new frame is available
+// (desktop unchanged) or on failure; GetFrame() must follow a success.
+HDC DxgiCapturer::GetHdc()
+{
+    d->_isCaptureSuccess = false;
+    if (!d->_bInit) {
+        return nullptr;
+    }
+
+    IDXGIResource* hDesktopResource = nullptr;
+    DXGI_OUTDUPL_FRAME_INFO FrameInfo;
+    HRESULT hr = d->_hDeskDupl->AcquireNextFrame(0, &FrameInfo, &hDesktopResource);
+    if (FAILED(hr)) {
+        if (hr == DXGI_ERROR_WAIT_TIMEOUT) { // expected when the desktop has no update; not logged as an error
+            return nullptr;
+        }
+        return nullptr;
+    }
+
+    // query next frame staging buffer
+    ID3D11Texture2D* srcImage = nullptr;
+    hr = hDesktopResource->QueryInterface(__uuidof(ID3D11Texture2D), reinterpret_cast<void**>(&srcImage));
+    Free(hDesktopResource, [=] { hDesktopResource->Release(); });
+    __CheckNullptr(SUCCEEDED(hr));
+
+    srcImage->GetDesc(&d->_desc);
+
+    // create a new staging buffer for fill frame image
+    auto desc = d->_desc;
+    desc.ArraySize = 1;
+    desc.BindFlags = D3D11_BIND_FLAG::D3D11_BIND_RENDER_TARGET;
+    desc.MiscFlags = D3D11_RESOURCE_MISC_GDI_COMPATIBLE;
+    desc.SampleDesc.Count = 1;
+    desc.SampleDesc.Quality = 0;
+    desc.MipLevels = 1;
+    desc.CPUAccessFlags = 0;
+    desc.Usage = D3D11_USAGE_DEFAULT;
+    hr = d->_hDevice->CreateTexture2D(&desc, nullptr, &d->_gdiImage);
+    if (FAILED(hr)) {
+        __DebugPrint("Create _gdiImage failed");
+        Free(srcImage, [=] { srcImage->Release(); });
+        Free(d->_hDeskDupl, [this] { d->_hDeskDupl->ReleaseFrame(); });
+        return nullptr;
+    }
+
+    // copy next staging buffer to new staging buffer
+    d->_hContext->CopyResource(d->_gdiImage, srcImage);
+    Free(srcImage, [=] { srcImage->Release(); });
+    d->_hDeskDupl->ReleaseFrame();
+
+    // create staging buffer for map bits
+    d->_hStagingSurf = nullptr;
+    hr = d->_gdiImage->QueryInterface(__uuidof(IDXGISurface), (void**) (&d->_hStagingSurf));
+    if (FAILED(hr)) {
+        __DebugPrint("_gdiImage->QueryInterface failed");
+        Free(d->_gdiImage, [this] { d->_gdiImage->Release(); });
+        return nullptr;
+    }
+
+    d->_isCaptureSuccess = true;
+    HDC hdc = nullptr;
+    // if GetDc is failed, the hdc is nullptr
+    d->_hStagingSurf->GetDC(FALSE, &hdc);
+    return hdc;
+}
+
+// Convert the texture captured by the preceding GetHdc() into an AVFrame:
+// NV12 via the GPU video processor when possible, otherwise a BGR0 copy.
+// Returns nullptr when GetHdc() did not succeed. The returned frame is
+// owned by this capturer.
+AVFrame* DxgiCapturer::GetFrame()
+{
+    if (!d->_isCaptureSuccess) {
+        return nullptr;
+    }
+    d->_isCaptureSuccess = false;
+    d->_hStagingSurf->ReleaseDC(nullptr);
+
+    // Create a temporary texture and copy the captured image into it.
+    ID3D11Texture2D* tmpImage = nullptr;
+    // NOTE(review): 2050 == 0x802 — presumably D3D11_RESOURCE_MISC_SHARED |
+    // D3D11_RESOURCE_MISC_SHARED_NTHANDLE; confirm and replace with the enum names.
+    d->_desc.MiscFlags = 2050;
+    __CheckNullptr(SUCCEEDED(d->_hDevice->CreateTexture2D(&d->_desc, nullptr, &tmpImage)));
+    d->_hContext->CopyResource(tmpImage, d->_gdiImage);
+
+    // First try to produce an NV12 frame
+    AVFrame* frame = nullptr;
+    auto tmpFormat = d->_desc.Format;
+    d->_desc.Format = DXGI_FORMAT_NV12;
+    if (GenNv12Frame(d->_hDevice,
+                     d->_hContext,
+                     d->_desc,
+                     tmpImage,
+                     d->_nv12Buffers,
+                     d->_nv12Frame,
+                     d->_rgbToNv12)) {
+        frame = d->_nv12Frame;
+    } else {
+        // NV12 path failed: fall back to a plain RGB frame
+        d->_desc.Format = tmpFormat;
+        GenRgbFrame(d->_hDevice,
+                    d->_hContext,
+                    d->_desc,
+                    d->_gdiImage,
+                    d->_xrgbBuffers,
+                    d->_xrgbFrame);
+        frame = d->_xrgbFrame;
+    }
+    Free(d->_hStagingSurf, [this] { d->_hStagingSurf->Release(); });
+    Free(tmpImage, [&tmpImage] { tmpImage->Release(); });
+    Free(d->_gdiImage, [this] { d->_gdiImage->Release(); });
+
+    return frame;
+}

+ 27 - 0
AvRecorder/capturer/video/dxgi_capturer.h

@@ -0,0 +1,27 @@
+#ifndef __DXGI_CAPTURER_H__
+#define __DXGI_CAPTURER_H__
+
+#include <d3d11.h>
+#include <dxgi1_2.h>
+
+struct AVFrame;
+
+class DxgiCapturer
+{
+public:
+    DxgiCapturer();
+    ~DxgiCapturer();
+
+public:
+    bool Open(int left, int top, int width, int height);
+    void Close();
+
+public:
+    HDC GetHdc();
+    AVFrame* GetFrame();
+
+private:
+    class DxgiCapturerPrivate* d;
+};
+
+#endif

+ 56 - 0
AvRecorder/capturer/video/gdi_capturer.cpp

@@ -0,0 +1,56 @@
+
+#include "gdi_capturer.h"
+#include "basic/basic.h"
+
+// Prepare GDI capture of `hwnd`: a memory DC + compatible bitmap that
+// BitBlt copies into, and a cached BGR24 AVFrame as the CPU-side target.
+// Returns false when a GDI resource or the frame cannot be created.
+bool GdiCapturer::Open(HWND hwnd, int width, int height)
+{
+    Close();
+    m_width = width;
+    m_height = height;
+    _srcHdc = GetWindowDC(hwnd);
+    __CheckBool(_srcHdc);
+    _dstHdc = CreateCompatibleDC(_srcHdc);
+    __CheckBool(_dstHdc);
+    _bitmap = CreateCompatibleBitmap(_srcHdc, width, height);
+    __CheckBool(_bitmap);
+    SelectObject(_dstHdc, _bitmap);
+
+    _bitmapInfo.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
+    _bitmapInfo.bmiHeader.biPlanes = 1;
+    _bitmapInfo.bmiHeader.biBitCount = 24;
+    _bitmapInfo.bmiHeader.biWidth = width;
+    _bitmapInfo.bmiHeader.biHeight = height;
+    _bitmapInfo.bmiHeader.biCompression = BI_RGB;
+    // For BI_RGB, biSizeImage may be 0; the previous value (width * height)
+    // under-stated the real 24-bpp image size anyway.
+    _bitmapInfo.bmiHeader.biSizeImage = 0;
+
+    // Allocate the cached output frame.
+    _frame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_BGR24, width, height);
+    __CheckBool(_frame);
+    return true;
+}
+
+// Blit the window contents into the memory DC and return it.
+// NOTE(review): the source offset (borderWidth / 2, borderHeight - borderWidth / 2)
+// looks like an estimate that skips the window frame and title bar — confirm
+// against how VideoCapturer::_GetHwndSize computes the border sizes.
+HDC GdiCapturer::GetHdc(int borderWidth, int borderHeight)
+{
+    __CheckNullptr(
+        BitBlt(_dstHdc, 0, 0, m_width, m_height,
+            _srcHdc, borderWidth / 2, borderHeight - borderWidth / 2, SRCCOPY));
+
+    return _dstHdc;
+}
+
+// Download the bitmap selected into _dstHdc into the cached BGR24 frame.
+// GetDIBits is called one scanline at a time so the bottom-up DIB rows are
+// flipped into the frame's top-down layout.
+AVFrame* GdiCapturer::GetFrame()
+{
+    auto linesize = _frame->linesize[0];
+    for (int row = 0; row < m_height; ++row) {
+        __CheckNullptr(GetDIBits(_dstHdc, _bitmap, m_height - 1 - row, 1, _frame->data[0] + row * linesize, &_bitmapInfo, DIB_RGB_COLORS));
+    }
+    return _frame;
+}
+
+// Release the cached frame and GDI resources. Safe to call repeatedly.
+void GdiCapturer::Close()
+{
+    Free(_frame, [this] { av_frame_free(&_frame); });
+    // A memory DC from CreateCompatibleDC must be freed with DeleteDC;
+    // the original called DeleteObject on it, which is the wrong API.
+    Free(_dstHdc, [this] { DeleteDC(_dstHdc); });
+    Free(_bitmap, [this] { DeleteObject(_bitmap); });
+    // NOTE(review): _srcHdc comes from GetWindowDC and is never passed to
+    // ReleaseDC (the HWND is not stored) — this leaks a window DC per
+    // Open/Close cycle; consider caching the HWND and releasing it here.
+}
+
+// Destructor: release all resources via Close().
+GdiCapturer::~GdiCapturer()
+{
+    Close();
+}

+ 25 - 0
AvRecorder/capturer/video/gdi_capturer.h

@@ -0,0 +1,25 @@
+#ifndef __GDI_CAPTURER_H__
+#define __GDI_CAPTURER_H__
+
+#include "basic/frame.h"
+#include <Windows.h>
+
+// Window capture via classic GDI BitBlt. Open() sets up the DCs plus a
+// cached BGR24 AVFrame; GetHdc() blits the window, GetFrame() downloads
+// the pixels. The returned frame stays owned by the capturer.
+class GdiCapturer {
+public:
+    bool Open(HWND hwnd, int width, int height);
+    HDC GetHdc(int borderWidth, int borderHeight);
+    AVFrame* GetFrame();
+    void Close();
+    ~GdiCapturer();
+
+private:
+    HDC _srcHdc = nullptr;     // window DC obtained from GetWindowDC
+    HDC _dstHdc = nullptr;     // memory DC holding _bitmap
+    HBITMAP _bitmap = nullptr; // capture target bitmap
+    BITMAPINFO _bitmapInfo;    // 24-bpp BI_RGB description used by GetDIBits
+    int m_width = 0;
+    int m_height = 0;
+    AVFrame* _frame = nullptr; // cached BGR24 output frame
+};
+
+#endif

+ 3 - 0
AvRecorder/capturer/video/ivideocapturer.cpp

@@ -0,0 +1,3 @@
+#include "ivideocapturer.h"
+
+// Out-of-line default constructor for the video capture interface.
+ivideocapturer::ivideocapturer() {}

+ 12 - 0
AvRecorder/capturer/video/ivideocapturer.h

@@ -0,0 +1,12 @@
+#ifndef IVIDEOCAPTURER_H
+#define IVIDEOCAPTURER_H
+
+// Abstract interface for region-based video capturers.
+class ivideocapturer
+{
+public:
+    ivideocapturer();
+
+    // An interface deleted through a base pointer needs a virtual
+    // destructor; without one such a delete is undefined behavior.
+    virtual ~ivideocapturer() = default;
+
+    // Begin capturing the given screen region; returns false on failure.
+    virtual bool open(int left, int top, int width, int height) = 0;
+};
+
+#endif // IVIDEOCAPTURER_H

+ 25 - 0
AvRecorder/capturer/video/video.pri

@@ -0,0 +1,25 @@
+HEADERS += \
+    $$PWD/ivideocapturer.h \
+    $$PWD/dxgi_capturer.h \
+    $$PWD/gdi_capturer.h \
+    $$PWD/video_capturer.h \
+    $$PWD/wgc/App.h \
+    $$PWD/wgc/SimpleCapture.h \
+    $$PWD/wgc/capture.interop.h \
+    $$PWD/wgc/composition.interop.h \
+    $$PWD/wgc/d3dHelpers.h \
+    $$PWD/wgc/direct3d11.interop.h \
+    $$PWD/wgc/pch.h \
+    $$PWD/wgc/winrt.h \
+    $$PWD/wgc_capturer.h
+
+SOURCES += \
+    $$PWD/ivideocapturer.cpp \
+    $$PWD/dxgi_capturer.cpp \
+    $$PWD/gdi_capturer.cpp \
+    $$PWD/video_capturer.cpp \
+    $$PWD/wgc/App.cpp \
+    $$PWD/wgc/SimpleCapture.cpp \
+    $$PWD/wgc/pch.cpp \
+    $$PWD/wgc/winrt.cpp \
+    $$PWD/wgc_capturer.cpp

+ 145 - 0
AvRecorder/capturer/video/video_capturer.cpp

@@ -0,0 +1,145 @@
+#include "video_capturer.h"
+#include "capturer/finder.h"
+
+// Start capturing a window with the chosen backend (WGC, or GDI fallback).
+// Returns false when the handle is invalid or the backend fails to start.
+bool VideoCapturer::Open(HWND hwnd, Method method)
+{
+    Close();
+    __CheckBool(hwnd);
+    m_srcHwnd = hwnd;
+    __CheckBool(_GetHwndSize(m_srcHwnd));
+    m_usingMethod = method;
+    m_type = WINDOW;
+    switch (method) {
+    case WGC: {
+        m_wgcCapturer = WgcCapturer::New();
+        // Guard against a failed factory; the original dereferenced the
+        // result unchecked.
+        __CheckBool(m_wgcCapturer);
+        __CheckBool(m_wgcCapturer->StartCapturerWindow(hwnd, m_width, m_height));
+        break;
+    }
+
+    default: { // GDI
+        m_gdiCapturer = new GdiCapturer;
+        __CheckBool(m_gdiCapturer->Open(hwnd, m_width, m_height));
+        break;
+    }
+    }
+
+    return true;
+}
+
+// Start capturing a monitor with the chosen backend (WGC, or DXGI fallback).
+// Returns false on an invalid monitor index or backend failure.
+bool VideoCapturer::Open(int monitorIdx, Method method)
+{
+    Close();
+    // Bounds-check the index: the original indexed the cached monitor list
+    // unchecked, which is undefined behavior on a bad or stale index.
+    auto&& monitors = MonitorFinder::GetList();
+    __CheckBool(monitorIdx >= 0 && monitorIdx < (int) monitors.size());
+    auto&& monitorInfo = monitors[monitorIdx];
+    m_rect = monitorInfo.rect;
+    m_borderHeight = 0;
+    m_borderWidth = 0;
+    m_width = m_rect.right - m_rect.left;
+    m_height = m_rect.bottom - m_rect.top;
+    m_usingMethod = method;
+    m_type = MONITOR;
+    switch (method) {
+    case WGC: {
+        auto monitor = monitorInfo.monitor;
+        m_wgcCapturer = WgcCapturer::New();
+        __CheckBool(m_wgcCapturer);
+        __CheckBool(m_wgcCapturer->StartCapturerMonitor(monitor, m_width, m_height));
+        break;
+    }
+
+    default: { // DXGI
+        m_dxgiCapturer = new DxgiCapturer;
+        __CheckBool(m_dxgiCapturer->Open(m_rect.left, m_rect.top, m_width, m_height));
+        break;
+    }
+    }
+    return true;
+}
+
+// Grab one frame from the active backend. WGC composites the cursor by
+// itself; for DXGI/GDI the cursor is drawn manually onto the capture HDC.
+AVFrame* VideoCapturer::GetFrame()
+{
+    switch (m_usingMethod) {
+    case WGC:
+        return m_wgcCapturer->GetFrame();
+    case DXGI: {
+        HDC hdc = m_dxgiCapturer->GetHdc();
+        if (m_isDrawCursor && hdc) {
+            _DrawCursor(hdc);
+        }
+        return m_dxgiCapturer->GetFrame();
+    }
+    default: { // GDI
+        HDC hdc = m_gdiCapturer->GetHdc(m_borderWidth, m_borderHeight);
+        if (m_isDrawCursor && hdc) {
+            _DrawCursor(hdc);
+        }
+        return m_gdiCapturer->GetFrame();
+    }
+    }
+}
+
+// Toggle cursor drawing. WGC draws the cursor itself, so the flag is
+// forwarded; DXGI/GDI consult m_isDrawCursor inside GetFrame().
+void VideoCapturer::SetDrawCursor(bool isDrawCursor)
+{
+    m_isDrawCursor = isDrawCursor;
+    if (m_usingMethod == WGC) {
+        m_wgcCapturer->SetDrawCursor(m_isDrawCursor);
+    }
+}
+
+// Shut down whichever backend is active. Safe to call repeatedly.
+void VideoCapturer::Close()
+{
+    Free(m_dxgiCapturer, [this] { m_dxgiCapturer->Close(); delete m_dxgiCapturer; });
+    Free(m_gdiCapturer, [this] { m_gdiCapturer->Close(); delete m_gdiCapturer; });
+    // NOTE(review): the WGC capturer is only Close()d, never deleted —
+    // presumably WgcCapturer::New() manages its own lifetime; confirm.
+    Free(m_wgcCapturer, [this] { m_wgcCapturer->Close(); });
+}
+
+VideoCapturer::~VideoCapturer()
+{
+    Close();
+}
+
+// Capture size: the client area for windows, the full rect for monitors.
+int VideoCapturer::GetWidth() const
+{
+    return m_width;
+}
+int VideoCapturer::GetHeight() const
+{
+    return m_height;
+}
+
+// Record the window's client size and estimate the non-client border sizes
+// (window rect minus client rect), clamped to zero. Returns false when the
+// window rects cannot be queried.
+bool VideoCapturer::_GetHwndSize(HWND hwnd)
+{
+    RECT rect;
+    __CheckBool(GetClientRect(hwnd, &rect));
+    m_rect = rect;
+    m_width = (rect.right - rect.left);
+    m_height = (rect.bottom - rect.top);
+    __CheckBool(GetWindowRect(hwnd, &rect));
+    m_borderHeight = rect.bottom - rect.top - m_height;
+    m_borderWidth = rect.right - rect.left - m_width;
+    if (m_borderHeight < 0) {
+        m_borderHeight = 0;
+    }
+    if (m_borderWidth < 0) {
+        m_borderWidth = 0;
+    }
+    return true;
+}
+
+// Draw the current mouse cursor onto the captured HDC at its screen
+// position translated into capture-local coordinates.
+void VideoCapturer::_DrawCursor(HDC hdc)
+{
+    CURSORINFO ci;
+    ci.cbSize = sizeof(CURSORINFO);
+    __CheckNo(GetCursorInfo(&ci));
+    int cursorX = ci.ptScreenPos.x;
+    int cursorY = ci.ptScreenPos.y;
+
+    if (cursorX > m_rect.right || cursorX < m_rect.left
+        || cursorY > m_rect.bottom || cursorY < m_rect.top) {
+        return; // cursor is outside the captured area
+    }
+
+    // NOTE(review): `flags == CURSOR_SHOWING` also skips the combined
+    // CURSOR_SHOWING|CURSOR_SUPPRESSED state (e.g. during touch input);
+    // confirm that is intended, otherwise test with `& CURSOR_SHOWING`.
+    if (ci.flags == CURSOR_SHOWING) {
+        // Paint the cursor at its position within the capture.
+        int x = cursorX - m_rect.left;
+        int y = cursorY - m_rect.top;
+        __CheckNo(DrawIconEx(hdc, x, y, ci.hCursor, 0, 0, 0, NULL, DI_NORMAL | DI_COMPAT));
+    }
+}

+ 46 - 0
AvRecorder/capturer/video/video_capturer.h

@@ -0,0 +1,46 @@
+#ifndef __AV_CAPTURER_H__
+#define __AV_CAPTURER_H__
+
+#include "dxgi_capturer.h"
+#include "gdi_capturer.h"
+#include "wgc_capturer.h"
+
+// Facade over the three capture backends (GDI / DXGI / WGC).
+// Open() picks and starts a backend; GetFrame() returns a frame owned by
+// the active backend.
+class VideoCapturer {
+public:
+    // Capture backend
+    enum Method {
+        GDI,
+        DXGI,
+        WGC
+    };
+
+    // Capture source kind
+    enum Type {
+        WINDOW,
+        MONITOR
+    };
+    ~VideoCapturer();
+    bool Open(HWND hwnd, Method method);
+    bool Open(int monitorIdx, Method method);
+    AVFrame* GetFrame();
+    void SetDrawCursor(bool isDrawCursor);
+    void Close();
+    int GetWidth() const;
+    int GetHeight() const;
+    Method GetMethod() const { return m_usingMethod; }
+
+private:
+    bool _GetHwndSize(HWND hwnd);
+    void _DrawCursor(HDC hdc);
+    Method m_usingMethod = WGC;
+    RECT m_rect;             // capture rect in screen coordinates
+    Type m_type = MONITOR;
+    DxgiCapturer* m_dxgiCapturer = nullptr; // active when DXGI + MONITOR
+    GdiCapturer* m_gdiCapturer = nullptr;   // active when GDI + WINDOW
+    WgcCapturer* m_wgcCapturer = nullptr;   // active when WGC
+    int m_width = 0;
+    int m_height = 0;
+    int m_borderHeight = 0;  // non-client height (window capture only)
+    int m_borderWidth = 0;   // non-client width (window capture only)
+    HWND m_srcHwnd = nullptr;
+    bool m_isDrawCursor = true;
+};
+#endif

+ 88 - 0
AvRecorder/capturer/video/wgc/App.cpp

@@ -0,0 +1,88 @@
+#include "App.h"
+// D3D
+#include <d2d1_3.h>
+#include <d3d11_4.h>
+#include <dxgi1_6.h>
+#include <wincodec.h>
+
+#include "pch.h"
+
+#include "basic/frame.h"
+
+using namespace winrt;
+using namespace Windows::System;
+using namespace Windows::Foundation;
+using namespace Windows::UI;
+using namespace Windows::UI::Composition;
+using namespace Windows::Graphics::Capture;
+
+// Build the composition visual tree used to preview the capture (root
+// container -> sprite visual -> surface brush) and create the D3D device
+// shared with the capture session.
+void App::Initialize(ContainerVisual const& root)
+{
+    auto queue = DispatcherQueue::GetForCurrentThread();
+
+    m_compositor = root.Compositor();
+    m_root = m_compositor.CreateContainerVisual();
+    m_content = m_compositor.CreateSpriteVisual();
+    m_brush = m_compositor.CreateSurfaceBrush();
+
+    m_root.RelativeSizeAdjustment({1, 1});
+    root.Children().InsertAtTop(m_root);
+
+    // Center the content, fill the parent minus an 80px margin, and show
+    // the captured surface through the brush with a drop shadow.
+    m_content.AnchorPoint({0.5f, 0.5f});
+    m_content.RelativeOffsetAdjustment({0.5f, 0.5f, 0});
+    m_content.RelativeSizeAdjustment({1, 1});
+    m_content.Size({-80, -80});
+    m_content.Brush(m_brush);
+    m_brush.HorizontalAlignmentRatio(0.5f);
+    m_brush.VerticalAlignmentRatio(0.5f);
+    m_brush.Stretch(CompositionStretch::Uniform);
+    auto shadow = m_compositor.CreateDropShadow();
+    shadow.Mask(m_brush);
+    m_content.Shadow(shadow);
+    m_root.Children().InsertAtTop(m_content);
+
+    // Create the D3D11 device and wrap it for WinRT interop.
+    auto d3dDevice = CreateD3DDevice();
+    auto dxgiDevice = d3dDevice.as<IDXGIDevice>();
+    m_device = CreateDirect3DDevice(dxgiDevice.get());
+}
+
+// Tear down the active capture session, if any.
+void App::Close()
+{
+    if (m_capture == nullptr) {
+        return;
+    }
+    m_capture->Close();
+    delete m_capture;
+    m_capture = nullptr;
+}
+
+// Begin capturing the given window: replaces any previous capture and
+// connects the new capture surface to the preview brush.
+bool App::StartCaptureWindow(HWND hwnd, int width, int height)
+{
+    Close();
+    auto item = CreateCaptureItemForWindow(hwnd);
+    __CheckBool(item);
+    m_capture = new SimpleCapture(m_device, item, width, height);
+    auto surface = m_capture->CreateSurface(m_compositor);
+    m_brush.Surface(surface);
+    m_capture->StartCapture();
+    return true;
+}
+
+// Forward the cursor-drawing preference to the active capture (no-op when idle).
+void App::SetDrawCursor(bool isDrawCursor)
+{
+    if (m_capture != nullptr) {
+        m_capture->SetDrawCursor(isDrawCursor);
+    }
+}
+
+// Begin capturing the given monitor: replaces any previous capture and
+// connects the new capture surface to the preview brush.
+bool App::StartCaptureMonitor(HMONITOR monitor, int width, int height)
+{
+    Close();
+    auto item = CreateCaptureItemForMonitor(monitor);
+    __CheckBool(item);
+    m_capture = new SimpleCapture(m_device, item, width, height);
+    auto surface = m_capture->CreateSurface(m_compositor);
+    m_brush.Surface(surface);
+    m_capture->StartCapture();
+    return true;
+}

+ 46 - 0
AvRecorder/capturer/video/wgc/App.h

@@ -0,0 +1,46 @@
+#pragma once
+
+#include <guiddef.h>
+// WinRT
+
+#include <winrt/Windows.Foundation.Numerics.h>
+#include <winrt/Windows.Foundation.h>
+#include <winrt/Windows.Graphics.Capture.h>
+#include <winrt/Windows.Graphics.DirectX.Direct3d11.h>
+#include <winrt/Windows.Graphics.DirectX.h>
+#include <winrt/Windows.Graphics.Imaging.h>
+#include <winrt/Windows.Storage.Streams.h>
+#include <winrt/Windows.Storage.h>
+#include <winrt/Windows.System.h>
+#include <winrt/Windows.UI.Composition.Desktop.h>
+#include <winrt/Windows.UI.Composition.h>
+#include <winrt/Windows.UI.Popups.h>
+#include <winrt/Windows.UI.h>
+
+#include <winrt/Windows.Foundation.Collections.h>
+
+#include "SimpleCapture.h"
+
+// Owns the WGC preview composition tree and the active SimpleCapture
+// session; a thin controller between the UI and the capture backend.
+class App {
+public:
+    App() { }
+    ~App() { }
+
+    // Build the preview visual tree and create the shared D3D device.
+    void Initialize(
+        winrt::Windows::UI::Composition::ContainerVisual const& root);
+
+    bool StartCaptureWindow(HWND hwnd, int width, int height);
+    bool StartCaptureMonitor(HMONITOR monitor, int width, int height);
+    void SetDrawCursor(bool isDrawCursor);
+    void Close();
+    // NOTE(review): dereferences m_capture unchecked — only valid after a
+    // successful StartCapture*(); consider a null guard.
+    AVFrame* GetFrame() { return m_capture->GetFrame(); }
+
+private:
+    winrt::Windows::UI::Composition::Compositor m_compositor {nullptr};
+    winrt::Windows::UI::Composition::ContainerVisual m_root {nullptr};
+    winrt::Windows::UI::Composition::SpriteVisual m_content {nullptr};
+    winrt::Windows::UI::Composition::CompositionSurfaceBrush m_brush {nullptr};
+
+    winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice m_device {nullptr};
+    SimpleCapture* m_capture = nullptr; // active capture session (owned)
+};

+ 175 - 0
AvRecorder/capturer/video/wgc/SimpleCapture.cpp

@@ -0,0 +1,175 @@
+
+// D3D
+#include <d3d11_4.h>
+#include <dxgi1_6.h>
+#include <d2d1_3.h>
+#include <wincodec.h>
+
+#include "pch.h"
+#include "SimpleCapture.h"
+#include "basic/basic.h"
+
+using namespace winrt;
+using namespace Windows;
+using namespace Windows::Foundation;
+using namespace Windows::System;
+using namespace Windows::Graphics;
+using namespace Windows::Graphics::Capture;
+using namespace Windows::Graphics::DirectX;
+using namespace Windows::Graphics::DirectX::Direct3D11;
+using namespace Windows::Foundation::Numerics;
+using namespace Windows::UI;
+using namespace Windows::UI::Composition;
+
+#undef min
+#undef max
+
+// Set up a Windows.Graphics.Capture session for `item`: a double-buffered
+// swap chain + frame pool at the item's current size, the FrameArrived
+// subscription, the RGB->NV12 converter, and the two cached output frames
+// of the requested (width, height).
+SimpleCapture::SimpleCapture(
+    IDirect3DDevice const& device,
+    GraphicsCaptureItem const& item,
+    int width, int height)
+{
+    m_item = item;
+    m_device = device;
+
+    // Set up
+    auto d3dDevice = GetDXGIInterfaceFromObject<ID3D11Device>(m_device);
+    d3dDevice->GetImmediateContext(m_d3dContext.put());
+    auto size = m_item.Size();
+
+    m_swapChain = CreateDXGISwapChain(
+        d3dDevice,
+        static_cast<uint32_t>(size.Width),
+        static_cast<uint32_t>(size.Height),
+        static_cast<DXGI_FORMAT>(DirectXPixelFormat::B8G8R8A8UIntNormalized),
+        2);
+
+    // Create framepool, define pixel format (DXGI_FORMAT_B8G8R8A8_UNORM), and frame size.
+    m_framePool = Direct3D11CaptureFramePool::Create(
+        m_device,
+        DirectXPixelFormat::B8G8R8A8UIntNormalized,
+        2,
+        size);
+
+    m_session = m_framePool.CreateCaptureSession(m_item);
+    m_lastSize = size;
+    m_frameArrived = m_framePool.FrameArrived(auto_revoke, {this, &SimpleCapture::OnFrameArrived});
+
+    // Set ColorSpace for the RGB -> NV12 conversion
+    D3D11_VIDEO_PROCESSOR_COLOR_SPACE inputColorSpace;
+    inputColorSpace.Usage = 1;
+    inputColorSpace.RGB_Range = 0;
+    inputColorSpace.YCbCr_Matrix = 1;
+    inputColorSpace.YCbCr_xvYCC = 0;
+    inputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_0_255;
+
+    D3D11_VIDEO_PROCESSOR_COLOR_SPACE outputColorSpace;
+    outputColorSpace.Usage = 0;
+    outputColorSpace.RGB_Range = 0;
+    outputColorSpace.YCbCr_Matrix = 1;
+    outputColorSpace.YCbCr_xvYCC = 0;
+    outputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_16_235;
+    m_rgbToNv12.Open(d3dDevice.get(), m_d3dContext.get(), inputColorSpace, outputColorSpace);
+    m_nv12Frame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12, width, height);
+    m_xrgbFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_BGR0, width, height);
+    __CheckNo(m_nv12Frame);
+    __CheckNo(m_xrgbFrame);
+    m_isCapture = true;
+    // Warm-up counter used by OnFrameArrived (see m_cnt handling there).
+    m_cnt = 5;
+}
+
+// Start sending capture frames
+void SimpleCapture::StartCapture()
+{
+    CheckClosed();
+    m_session.StartCapture();
+}
+
+// Wrap the swap chain in a composition surface for the preview brush.
+ICompositionSurface SimpleCapture::CreateSurface(
+    Compositor const& compositor)
+{
+    CheckClosed();
+    return CreateCompositionSurfaceForSwapChain(compositor, m_swapChain.get());
+}
+
+// Stop the capture session and release frames/buffers. The WinRT objects
+// are torn down only once (guarded by m_closed); the frame and buffer
+// cleanup below runs on every call.
+void SimpleCapture::Close()
+{
+    auto expected = false;
+    if (m_closed.compare_exchange_strong(expected, true)) {
+        m_frameArrived.revoke();
+        m_framePool.Close();
+        m_session.Close();
+        m_swapChain = nullptr;
+        m_framePool = nullptr;
+        m_session = nullptr;
+        m_item = nullptr;
+    }
+    m_nv12Buffers.Clear();
+    m_xrgbBuffers.Clear();
+    m_rgbToNv12.Close();
+    Free(m_nv12Frame, [this] { av_frame_free(&m_nv12Frame); });
+    Free(m_xrgbFrame, [this] { av_frame_free(&m_xrgbFrame); });
+}
+
+// FrameArrived handler: pull the next captured texture, handle size
+// changes (resize swap chain, recreate frame pool), and convert the
+// texture into the cached NV12 frame (or BGR0 on fallback).
+void SimpleCapture::OnFrameArrived(
+    Direct3D11CaptureFramePool const& sender,
+    winrt::Windows::Foundation::IInspectable const&)
+{
+    auto newSize = false;
+    auto frame = sender.TryGetNextFrame();
+    auto frameContentSize = frame.ContentSize();
+    if (frameContentSize.Width != m_lastSize.Width || frameContentSize.Height != m_lastSize.Height) {
+        // The thing we have been capturing has changed size.
+        // We need to resize our swap chain first, then blit the pixels.
+        // After we do that, retire the frame and then recreate our frame pool.
+        newSize = true;
+        m_lastSize = frameContentSize;
+        m_swapChain->ResizeBuffers(
+            2,
+            static_cast<uint32_t>(m_lastSize.Width),
+            static_cast<uint32_t>(m_lastSize.Height),
+            static_cast<DXGI_FORMAT>(DirectXPixelFormat::B8G8R8A8UIntNormalized),
+            0);
+        m_nv12Buffers.Clear();
+        m_xrgbBuffers.Clear();
+    }
+    // m_cnt counts down the initial warm-up frames set in the constructor;
+    // while it is positive, frames are converted even right after a resize.
+    if (m_cnt > 0) {
+        --m_cnt;
+    }
+    m_isCapture = (m_isCapture && !newSize) || m_cnt > 0;
+    if (m_isCapture) {
+        auto frameSurface = GetDXGIInterfaceFromObject<ID3D11Texture2D>(frame.Surface());
+        D3D11_TEXTURE2D_DESC desc;
+        frameSurface->GetDesc(&desc);
+        auto d3dDevice = GetDXGIInterfaceFromObject<ID3D11Device>(m_device);
+
+        // First try to produce an NV12 frame; fall back to RGB on failure.
+        auto tmpFormat = desc.Format;
+        desc.Format = DXGI_FORMAT_NV12;
+        if (GenNv12Frame(d3dDevice.get(), m_d3dContext.get(), desc, frameSurface.get(),
+                m_nv12Buffers, m_nv12Frame, m_rgbToNv12)) {
+            m_pixType = _PixType::NV12;
+        } else {
+            desc.Format = tmpFormat;
+            GenRgbFrame(d3dDevice.get(), m_d3dContext.get(), desc, frameSurface.get(),
+                m_xrgbBuffers, m_xrgbFrame);
+            m_pixType = _PixType::RGB;
+        }
+    }
+
+    // Preview path kept for reference: present the frame to the swap chain.
+    // com_ptr<ID3D11Texture2D> backBuffer;
+    // check_hresult(m_swapChain->GetBuffer(0, guid_of<ID3D11Texture2D>(), backBuffer.put_void()));
+    // m_d3dContext->CopyResource(backBuffer.get(), m_bufferFiller.GetMap());
+
+    // DXGI_PRESENT_PARAMETERS presentParameters = {0};
+    // auto hr = m_swapChain->Present1(1, 0, &presentParameters);
+
+    if (newSize) {
+        m_framePool.Recreate(
+            m_device,
+            DirectXPixelFormat::B8G8R8A8UIntNormalized,
+            2,
+            m_lastSize);
+    }
+}

+ 62 - 0
AvRecorder/capturer/video/wgc/SimpleCapture.h

@@ -0,0 +1,62 @@
+
+#pragma once
+
+#include <chrono>
+#include "d3d/gen_frame.h"
+
+// Owns one Windows.Graphics.Capture session and converts each arriving frame
+// into an FFmpeg AVFrame: NV12 when the GPU conversion path succeeds, BGRA
+// ("RGB") otherwise. GetFrame() hands out the most recently converted frame.
+class SimpleCapture {
+public:
+    SimpleCapture(
+        winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice const& device,
+        winrt::Windows::Graphics::Capture::GraphicsCaptureItem const& item,
+        int width, int height);
+    ~SimpleCapture() { Close(); }
+
+    void StartCapture();
+    winrt::Windows::UI::Composition::ICompositionSurface CreateSurface(
+        winrt::Windows::UI::Composition::Compositor const& compositor);
+
+    // Enable/disable cursor rendering in the captured frames.
+    void SetDrawCursor(bool isDrawCursor) { m_session.IsCursorCaptureEnabled(isDrawCursor); }
+
+    void Close();
+
+    // Latest converted frame; may be nullptr before the first frame arrives.
+    AVFrame* GetFrame() const noexcept { return m_pixType == NV12 ? m_nv12Frame : m_xrgbFrame; }
+
+private:
+    void OnFrameArrived(
+        winrt::Windows::Graphics::Capture::Direct3D11CaptureFramePool const& sender,
+        winrt::Windows::Foundation::IInspectable const& args);
+
+    void CheckClosed()
+    {
+        if (m_closed.load() == true) {
+            throw winrt::hresult_error(RO_E_CLOSED);
+        }
+    }
+
+private:
+    enum _PixType {
+        NV12,
+        RGB
+    };
+
+    winrt::Windows::Graphics::Capture::GraphicsCaptureItem m_item {nullptr};
+    winrt::Windows::Graphics::Capture::Direct3D11CaptureFramePool m_framePool {nullptr};
+    winrt::Windows::Graphics::Capture::GraphicsCaptureSession m_session {nullptr};
+    winrt::Windows::Graphics::SizeInt32 m_lastSize;
+
+    winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice m_device {nullptr};
+    winrt::com_ptr<IDXGISwapChain1> m_swapChain {nullptr};
+    winrt::com_ptr<ID3D11DeviceContext> m_d3dContext {nullptr};
+
+    std::atomic<bool> m_closed = false;
+    winrt::Windows::Graphics::Capture::Direct3D11CaptureFramePool::FrameArrived_revoker m_frameArrived;
+    AVFrame* m_xrgbFrame = nullptr;
+    AVFrame* m_nv12Frame = nullptr;
+    BufferFiller m_xrgbBuffers;
+    BufferFiller m_nv12Buffers;
+    D3dConverter m_rgbToNv12;
+    // Fixed: was uninitialized — GetFrame() called before the first
+    // OnFrameArrived read an indeterminate value. Default to RGB so the
+    // getter returns m_xrgbFrame (initially nullptr) until a frame arrives.
+    _PixType m_pixType = RGB;
+    bool m_isCapture = true;
+    // Initial frames always let through while the size settles — presumably a
+    // warm-up count; confirm against OnFrameArrived's m_cnt handling.
+    int m_cnt = 5;
+};

+ 24 - 0
AvRecorder/capturer/video/wgc/capture.interop.h

@@ -0,0 +1,24 @@
+#pragma once
+#include <guiddef.h>
+
+#include <winrt/Windows.Graphics.Capture.h>
+#include <windows.graphics.capture.interop.h>
+#include <windows.graphics.capture.h>
+
+// Create a GraphicsCaptureItem for a top-level window via the WGC interop
+// factory. Fixed: the HRESULT from CreateForWindow was ignored, so a failure
+// (e.g. an uncapturable window) silently returned a null item; it now throws.
+inline auto CreateCaptureItemForWindow(HWND hwnd)
+{
+    auto activation_factory = winrt::get_activation_factory<winrt::Windows::Graphics::Capture::GraphicsCaptureItem>();
+    auto interop_factory = activation_factory.as<IGraphicsCaptureItemInterop>();
+    winrt::Windows::Graphics::Capture::GraphicsCaptureItem item = {nullptr};
+    winrt::check_hresult(interop_factory->CreateForWindow(hwnd, winrt::guid_of<ABI::Windows::Graphics::Capture::IGraphicsCaptureItem>(), reinterpret_cast<void**>(winrt::put_abi(item))));
+    return item;
+}
+
+// Create a GraphicsCaptureItem for a whole monitor. Fixed: the HRESULT from
+// CreateForMonitor was ignored, silently yielding a null item on failure.
+inline auto CreateCaptureItemForMonitor(HMONITOR monitor)
+{
+    auto activation_factory = winrt::get_activation_factory<winrt::Windows::Graphics::Capture::GraphicsCaptureItem>();
+    auto interop_factory = activation_factory.as<IGraphicsCaptureItemInterop>();
+    winrt::Windows::Graphics::Capture::GraphicsCaptureItem item = {nullptr};
+    winrt::check_hresult(interop_factory->CreateForMonitor(monitor, winrt::guid_of<ABI::Windows::Graphics::Capture::IGraphicsCaptureItem>(), reinterpret_cast<void**>(winrt::put_abi(item))));
+    return item;
+}

+ 61 - 0
AvRecorder/capturer/video/wgc/composition.interop.h

@@ -0,0 +1,61 @@
+#pragma once
+#include <guiddef.h>
+
+#include <winrt/Windows.UI.Composition.h>
+#include <windows.ui.composition.interop.h>
+#include <d2d1_1.h>
+
+// Build a CompositionGraphicsDevice from a raw (D3D/D2D) device via the
+// compositor interop interface, then QI the ABI result back into a WinRT
+// projection object.
+inline auto CreateCompositionGraphicsDevice(
+    winrt::Windows::UI::Composition::Compositor const& compositor,
+    ::IUnknown* device)
+{
+    winrt::Windows::UI::Composition::CompositionGraphicsDevice graphicsDevice{ nullptr };
+    auto compositorInterop = compositor.as<ABI::Windows::UI::Composition::ICompositorInterop>();
+    winrt::com_ptr<ABI::Windows::UI::Composition::ICompositionGraphicsDevice> graphicsInterop;
+    winrt::check_hresult(compositorInterop->CreateGraphicsDevice(device, graphicsInterop.put()));
+    winrt::check_hresult(graphicsInterop->QueryInterface(winrt::guid_of<winrt::Windows::UI::Composition::CompositionGraphicsDevice>(),
+        reinterpret_cast<void**>(winrt::put_abi(graphicsDevice))));
+    return graphicsDevice;
+}
+
+// Resize a composition drawing surface, rounding the floating-point size to
+// whole pixels before handing it to the interop Resize call.
+inline void ResizeSurface(
+    winrt::Windows::UI::Composition::CompositionDrawingSurface const& surface,
+    winrt::Windows::Foundation::Size const& size)
+{
+    const SIZE pixelSize {
+        static_cast<LONG>(std::round(size.Width)),
+        static_cast<LONG>(std::round(size.Height))};
+    auto surfaceInterop = surface.as<ABI::Windows::UI::Composition::ICompositionDrawingSurfaceInterop>();
+    winrt::check_hresult(surfaceInterop->Resize(pixelSize));
+}
+
+// Open a drawing session on the surface and return a D2D device context whose
+// transform is translated to the sub-rectangle the compositor allocated
+// (BeginDraw reports the offset; callers draw relative to it).
+inline auto SurfaceBeginDraw(
+    winrt::Windows::UI::Composition::CompositionDrawingSurface const& surface)
+{
+    auto surfaceInterop = surface.as<ABI::Windows::UI::Composition::ICompositionDrawingSurfaceInterop>();
+    winrt::com_ptr<ID2D1DeviceContext> context;
+    POINT offset = {};
+    winrt::check_hresult(surfaceInterop->BeginDraw(nullptr, __uuidof(ID2D1DeviceContext), context.put_void(), &offset));
+    context->SetTransform(D2D1::Matrix3x2F::Translation((FLOAT)offset.x,(FLOAT) offset.y));
+    return context;
+}
+
+// Close the drawing session previously opened by SurfaceBeginDraw.
+inline void SurfaceEndDraw(
+    winrt::Windows::UI::Composition::CompositionDrawingSurface const& surface)
+{
+    winrt::check_hresult(
+        surface.as<ABI::Windows::UI::Composition::ICompositionDrawingSurfaceInterop>()->EndDraw());
+}
+
+// Wrap a DXGI swap chain in an ICompositionSurface so it can be used as the
+// brush source of a composition visual.
+inline auto CreateCompositionSurfaceForSwapChain(
+    winrt::Windows::UI::Composition::Compositor const& compositor,
+    ::IUnknown* swapChain)
+{
+    winrt::Windows::UI::Composition::ICompositionSurface surface{ nullptr };
+    auto compositorInterop = compositor.as<ABI::Windows::UI::Composition::ICompositorInterop>();
+    winrt::com_ptr<ABI::Windows::UI::Composition::ICompositionSurface> surfaceInterop;
+    winrt::check_hresult(compositorInterop->CreateCompositionSurfaceForSwapChain(swapChain, surfaceInterop.put()));
+    winrt::check_hresult(surfaceInterop->QueryInterface(winrt::guid_of<winrt::Windows::UI::Composition::ICompositionSurface>(),
+        reinterpret_cast<void**>(winrt::put_abi(surface))));
+    return surface;
+}

+ 173 - 0
AvRecorder/capturer/video/wgc/d3dHelpers.h

@@ -0,0 +1,173 @@
+#pragma once
+
+#include "composition.interop.h"
+
+// RAII scope for drawing on a CompositionDrawingSurface: BeginDraw on
+// construction, EndDraw on destruction.
+struct SurfaceContext
+{
+public:
+    // Empty context — owns no surface and performs no EndDraw.
+    SurfaceContext(std::nullptr_t) {}
+    SurfaceContext(
+        winrt::Windows::UI::Composition::CompositionDrawingSurface surface)
+    {
+        m_surface = surface;
+        m_d2dContext = SurfaceBeginDraw(m_surface);
+    }
+    ~SurfaceContext()
+    {
+        // Fixed: the nullptr_t constructor leaves m_surface empty; calling
+        // SurfaceEndDraw on it would throw from a destructor (std::terminate).
+        if (m_surface) {
+            SurfaceEndDraw(m_surface);
+        }
+        m_d2dContext = nullptr;
+        m_surface = nullptr;
+    }
+
+    winrt::com_ptr<ID2D1DeviceContext> GetDeviceContext() { return m_d2dContext; }
+
+private:
+    winrt::com_ptr<ID2D1DeviceContext> m_d2dContext;
+    winrt::Windows::UI::Composition::CompositionDrawingSurface m_surface{ nullptr };
+};
+
+// RAII guard around ID3D11Multithread: Enter() on construction, Leave() on
+// destruction.
+struct D3D11DeviceLock
+{
+public:
+    // Empty guard — acquires nothing.
+    D3D11DeviceLock(std::nullopt_t) {}
+    D3D11DeviceLock(ID3D11Multithread* pMultithread)
+    {
+        m_multithread.copy_from(pMultithread);
+        m_multithread->Enter();
+    }
+    ~D3D11DeviceLock()
+    {
+        // Fixed: the std::nullopt_t constructor leaves m_multithread null and
+        // the destructor dereferenced it unconditionally (crash).
+        if (m_multithread) {
+            m_multithread->Leave();
+            m_multithread = nullptr;
+        }
+    }
+private:
+    winrt::com_ptr<ID3D11Multithread> m_multithread;
+};
+
+// Create the process-wide WIC imaging factory (COM must be initialized).
+inline auto
+CreateWICFactory()
+{
+    winrt::com_ptr<IWICImagingFactory2> factory;
+    winrt::check_hresult(::CoCreateInstance(
+        CLSID_WICImagingFactory,
+        nullptr,
+        CLSCTX_INPROC_SERVER,
+        winrt::guid_of<IWICImagingFactory>(),
+        factory.put_void()));
+    return factory;
+}
+
+// Build a Direct2D device on top of the given Direct3D 11 device.
+inline auto
+CreateD2DDevice(
+    winrt::com_ptr<ID2D1Factory1> const& factory,
+    winrt::com_ptr<ID3D11Device> const& device)
+{
+    auto dxgiDevice = device.as<IDXGIDevice>();
+    winrt::com_ptr<ID2D1Device> d2dDevice;
+    winrt::check_hresult(factory->CreateDevice(dxgiDevice.get(), d2dDevice.put()));
+    return d2dDevice;
+}
+
+// Create a D3D11 device of the requested driver type with BGRA support
+// (required for D2D interop). Returns the raw HRESULT so the caller can
+// fall back to another driver type (see the parameterless overload).
+inline auto
+CreateD3DDevice(
+    D3D_DRIVER_TYPE const type,
+    winrt::com_ptr<ID3D11Device>& device)
+{
+    WINRT_ASSERT(!device);
+
+    UINT flags = D3D11_CREATE_DEVICE_BGRA_SUPPORT;
+
+//#ifdef _DEBUG
+//	flags |= D3D11_CREATE_DEVICE_DEBUG;
+//#endif
+
+    return D3D11CreateDevice(
+        nullptr,
+        type,
+        nullptr,
+        flags,
+        nullptr, 0,
+        D3D11_SDK_VERSION,
+        device.put(),
+        nullptr,
+        nullptr);
+}
+
+// Create a D3D11 device, preferring hardware and falling back to the WARP
+// software rasterizer when hardware is unsupported. Throws on failure.
+inline auto
+CreateD3DDevice()
+{
+    winrt::com_ptr<ID3D11Device> device;
+    auto hr = CreateD3DDevice(D3D_DRIVER_TYPE_HARDWARE, device);
+    if (hr == DXGI_ERROR_UNSUPPORTED) {
+        hr = CreateD3DDevice(D3D_DRIVER_TYPE_WARP, device);
+    }
+    winrt::check_hresult(hr);
+    return device;
+}
+
+// Create a single-threaded Direct2D factory (debug layer left disabled; see
+// the commented block).
+inline auto
+CreateD2DFactory()
+{
+    D2D1_FACTORY_OPTIONS options{};
+
+//#ifdef _DEBUG
+//	options.debugLevel = D2D1_DEBUG_LEVEL_INFORMATION;
+//#endif
+
+    winrt::com_ptr<ID2D1Factory1> factory;
+
+    winrt::check_hresult(D2D1CreateFactory(
+        D2D1_FACTORY_TYPE_SINGLE_THREADED,
+        options,
+        factory.put()));
+
+    return factory;
+}
+
+// Create a composition swap chain on the DXGI factory that owns the device
+// (walks device -> adapter -> factory so the chain lives on the same GPU).
+inline auto
+CreateDXGISwapChain(
+    winrt::com_ptr<ID3D11Device> const& device,
+    const DXGI_SWAP_CHAIN_DESC1* desc)
+{
+    auto dxgiDevice = device.as<IDXGIDevice2>();
+    winrt::com_ptr<IDXGIAdapter> adapter;
+    winrt::check_hresult(dxgiDevice->GetParent(winrt::guid_of<IDXGIAdapter>(), adapter.put_void()));
+    winrt::com_ptr<IDXGIFactory2> factory;
+    winrt::check_hresult(adapter->GetParent(winrt::guid_of<IDXGIFactory2>(), factory.put_void()));
+
+    winrt::com_ptr<IDXGISwapChain1> swapchain;
+    winrt::check_hresult(factory->CreateSwapChainForComposition(
+        device.get(),
+        desc,
+        nullptr,
+        swapchain.put()));
+
+    return swapchain;
+}
+
+// Convenience overload: fill a flip-sequential, premultiplied-alpha
+// composition swap-chain description and delegate to the desc-based overload.
+inline auto
+CreateDXGISwapChain(
+    winrt::com_ptr<ID3D11Device> const& device,
+    uint32_t width,
+    uint32_t height,
+    DXGI_FORMAT format,
+    uint32_t bufferCount)
+{
+    DXGI_SWAP_CHAIN_DESC1 swapDesc = {};
+    swapDesc.Width = width;
+    swapDesc.Height = height;
+    swapDesc.Format = format;
+    swapDesc.SampleDesc = {1, 0};
+    swapDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
+    swapDesc.BufferCount = bufferCount;
+    swapDesc.Scaling = DXGI_SCALING_STRETCH;
+    swapDesc.SwapEffect = DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL;
+    swapDesc.AlphaMode = DXGI_ALPHA_MODE_PREMULTIPLIED;
+    return CreateDXGISwapChain(device, &swapDesc);
+}

+ 40 - 0
AvRecorder/capturer/video/wgc/direct3d11.interop.h

@@ -0,0 +1,40 @@
+#pragma once
+#include <winrt/windows.graphics.directx.direct3d11.h>
+
+extern "C"
+{
+    HRESULT __stdcall CreateDirect3D11DeviceFromDXGIDevice(::IDXGIDevice* dxgiDevice,
+        ::IInspectable** graphicsDevice);
+
+    HRESULT __stdcall CreateDirect3D11SurfaceFromDXGISurface(::IDXGISurface* dgxiSurface,
+        ::IInspectable** graphicsSurface);
+}
+
+// Interop interface implemented by the WinRT Direct3D wrapper objects;
+// GetInterface() exposes the underlying DXGI/D3D interface. The UUID is the
+// documented IDirect3DDxgiInterfaceAccess GUID — confirm against the SDK.
+struct __declspec(uuid("A9B3D012-3DF2-4EE3-B8D1-8695F457D3C1"))
+    IDirect3DDxgiInterfaceAccess : ::IUnknown
+{
+    virtual HRESULT __stdcall GetInterface(GUID const& id, void** object) = 0;
+};
+
+// Wrap a DXGI device in a WinRT IDirect3DDevice via the interop export.
+inline auto CreateDirect3DDevice(IDXGIDevice* dxgi_device)
+{
+    winrt::com_ptr<::IInspectable> inspectable;
+    winrt::check_hresult(CreateDirect3D11DeviceFromDXGIDevice(dxgi_device, inspectable.put()));
+    return inspectable.as<winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice>();
+}
+
+// Wrap a DXGI surface in a WinRT IDirect3DSurface via the interop export.
+inline auto CreateDirect3DSurface(IDXGISurface* dxgi_surface)
+{
+    winrt::com_ptr<::IInspectable> inspectable;
+    winrt::check_hresult(CreateDirect3D11SurfaceFromDXGISurface(dxgi_surface, inspectable.put()));
+    return inspectable.as<winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DSurface>();
+}
+
+// Extract the native DXGI/D3D interface T from a WinRT Direct3D wrapper
+// object (e.g. ID3D11Texture2D from an IDirect3DSurface). Throws if the
+// object does not support the interop access interface or T.
+template <typename T>
+auto GetDXGIInterfaceFromObject(winrt::Windows::Foundation::IInspectable const& object)
+{
+    auto access = object.as<IDirect3DDxgiInterfaceAccess>();
+    winrt::com_ptr<T> result;
+    winrt::check_hresult(access->GetInterface(winrt::guid_of<T>(), result.put_void()));
+    return result;
+}

+ 9 - 0
AvRecorder/capturer/video/wgc/pch.cpp

@@ -0,0 +1,9 @@
+
+// D3D
+#include <d3d11_4.h>
+#include <dxgi1_6.h>
+#include <d2d1_3.h>
+#include <wincodec.h>
+
+
+#include "pch.h"

+ 34 - 0
AvRecorder/capturer/video/wgc/pch.h

@@ -0,0 +1,34 @@
+#pragma once
+#include <Unknwn.h>
+#include <inspectable.h>
+
+// WinRT
+
+#include <winrt/Windows.Foundation.h>
+#include <winrt/Windows.Graphics.Capture.h>
+#include <winrt/Windows.Graphics.DirectX.Direct3d11.h>
+#include <winrt/Windows.Graphics.DirectX.h>
+#include <winrt/Windows.System.h>
+#include <winrt/Windows.UI.Composition.Desktop.h>
+#include <winrt/Windows.UI.Composition.h>
+#include <winrt/Windows.UI.Popups.h>
+#include <winrt/Windows.UI.h>
+
+#include <windows.ui.composition.interop.h>
+#include <DispatcherQueue.h>
+
+// STL
+#include <atomic>
+#include <memory>
+
+// D3D
+#include <d3d11_4.h>
+#include <dxgi1_6.h>
+#include <d2d1_3.h>
+#include <wincodec.h>
+
+// Helpers
+#include "composition.interop.h"
+#include "d3dHelpers.h"
+#include "direct3d11.interop.h"
+#include "capture.interop.h"

+ 32 - 0
AvRecorder/capturer/video/wgc/winrt.cpp

@@ -0,0 +1,32 @@
+#include "pch.h"
+
+#include <ShObjIdl.h>
+
+using namespace winrt;
+using namespace Windows::UI;
+using namespace Windows::UI::Composition;
+using namespace Windows::UI::Composition::Desktop;
+
+// Direct3D11CaptureFramePool requires a DispatcherQueue
+// Creates a DispatcherQueueController pumping on the CURRENT thread, which
+// therefore must be an STA COM apartment (DQTAT_COM_STA).
+winrt::Windows::System::DispatcherQueueController CreateDispatcherQueueController()
+{
+    namespace abi = ABI::Windows::System;
+
+    DispatcherQueueOptions options {
+        sizeof(DispatcherQueueOptions),
+        DQTYPE_THREAD_CURRENT,
+        DQTAT_COM_STA};
+
+    Windows::System::DispatcherQueueController controller {nullptr};
+    check_hresult(CreateDispatcherQueueController(options, reinterpret_cast<abi::IDispatcherQueueController**>(put_abi(controller))));
+    return controller;
+}
+
+// Attach a composition target to the given HWND. The `true` argument is the
+// interop call's topmost flag — confirm against ICompositorDesktopInterop
+// documentation.
+DesktopWindowTarget CreateDesktopWindowTarget(Compositor const& compositor, HWND window)
+{
+    namespace abi = ABI::Windows::UI::Composition::Desktop;
+    auto interop = compositor.as<abi::ICompositorDesktopInterop>();
+    DesktopWindowTarget target {nullptr};
+    check_hresult(interop->CreateDesktopWindowTarget(window, true, reinterpret_cast<abi::IDesktopWindowTarget**>(put_abi(target))));
+    return target;
+}

+ 24 - 0
AvRecorder/capturer/video/wgc/winrt.h

@@ -0,0 +1,24 @@
+#pragma once
+
+// WinRT
+#include <winrt/Windows.Foundation.h>
+#include <winrt/Windows.System.h>
+#include <winrt/Windows.UI.h>
+#include <winrt/Windows.UI.Composition.h>
+#include <winrt/Windows.UI.Composition.Desktop.h>
+#include <winrt/Windows.UI.Popups.h>
+#include <winrt/Windows.Graphics.Capture.h>
+#include <winrt/Windows.Graphics.DirectX.h>
+#include <winrt/Windows.Graphics.DirectX.Direct3d11.h>
+
+#include <windows.ui.composition.interop.h>
+#include <DispatcherQueue.h>
+
+using namespace winrt;
+using namespace Windows::UI;
+using namespace Windows::UI::Composition;
+using namespace Windows::UI::Composition::Desktop;
+
+// Direct3D11CaptureFramePool requires a DispatcherQueue
+winrt::Windows::System::DispatcherQueueController CreateDispatcherQueueController();
+DesktopWindowTarget CreateDesktopWindowTarget(Compositor const& compositor, HWND window);

+ 97 - 0
AvRecorder/capturer/video/wgc_capturer.cpp

@@ -0,0 +1,97 @@
+#include "wgc_capturer.h"
+
+#include "wgc/winrt.h"
+
+#include <QWidget>
+
+// Shared dispatcher queue / composition root for all capturers. Init() points
+// these at function-local statics (valid for the rest of the process).
+winrt::Windows::System::DispatcherQueue* WgcCapturer::queuePtr = nullptr;
+winrt::Windows::UI::Composition::ContainerVisual* WgcCapturer::rootPtr = nullptr;
+std::list<WgcCapturer*> WgcCapturer::_capturers;
+// Hidden window hosting the DesktopWindowTarget that the composition needs.
+QWidget* __widget = nullptr;
+
+// One-time process-wide setup: COM apartment, dispatcher queue, and the
+// composition tree (hosted in a hidden QWidget). Idempotent via queuePtr.
+// The function-local statics below live until process exit, which is what
+// makes storing their addresses in queuePtr/rootPtr valid.
+void WgcCapturer::Init()
+{
+    if (queuePtr != nullptr) {
+        return;
+    }
+    // Init COM
+    init_apartment(apartment_type::single_threaded);
+    // Create a DispatcherQueue for our thread
+    static auto controller = CreateDispatcherQueueController();
+    // Initialize Composition
+    static auto compositor = Compositor();
+    __widget = new QWidget;
+    __widget->hide();
+    // __widget->resize(800, 600);
+    // __widget->show();
+    static auto target = CreateDesktopWindowTarget(compositor, (HWND)__widget->winId());
+    static auto root = compositor.CreateContainerVisual();
+    root.RelativeSizeAdjustment({1.0f, 1.0f});
+    target.Root(root);
+
+    // Enqueue our capture work on the dispatcher
+    static auto queue = controller.DispatcherQueue();
+    queuePtr = &queue;
+    rootPtr = &root;
+    // Pre-create one capturer so the first New() call returns immediately
+    New();
+}
+
+// Tear down the hidden host window and every pooled capturer.
+void WgcCapturer::Uninit()
+{
+    delete __widget;
+    // Fixed: __widget was left dangling — a second Uninit() (or a later
+    // Init()) would touch freed memory.
+    __widget = nullptr;
+    while (!_capturers.empty()) {
+        delete _capturers.front();
+        _capturers.pop_front();
+    }
+}
+
+WgcCapturer* WgcCapturer::New()
+{
+    // Return the previously prepared instance and prepare a new one.
+    // NOTE(review): the code does not match that intent — the returned
+    // instance is left in _capturers and no replacement is pushed, so every
+    // call returns the same object. Confirm whether that is deliberate.
+    if (_capturers.empty()) {
+        _capturers.push_back(new WgcCapturer);
+    }
+    return *(--_capturers.end());
+}
+
+// Intentionally a no-op: the pooled instances are reclaimed by Uninit().
+// The original removal logic is kept below for reference.
+void WgcCapturer::Delete(WgcCapturer* ptr)
+{
+    // auto iter = std::find(_capturers.begin(), _capturers.end(), ptr);
+    // if (iter == _capturers.end()) {
+    //     return;
+    // }
+    // if (*iter != nullptr) {
+    //     delete *iter;
+    // }
+    // _capturers.erase(iter);
+}
+
+// Creates the App and queues its Initialize() on the shared dispatcher
+// thread. Note: the [=] lambda captures `this`, so the object must outlive
+// the queued work; _isAppInit flips only once Initialize has actually run.
+WgcCapturer::WgcCapturer()
+{
+    _app = new App;
+    _isAppInit = false;
+    auto success = queuePtr->TryEnqueue([=]() -> void {
+        _app->Initialize(*rootPtr);
+        _isAppInit = true;
+    });
+    WINRT_VERIFY(success);
+}
+
+// Release the owned App. `delete` on nullptr is a no-op, so no guard needed.
+WgcCapturer::~WgcCapturer()
+{
+    delete _app;
+    _app = nullptr;
+}
+
+// Begin capturing an entire monitor at the requested output size.
+// NOTE(review): does not wait for _isAppInit — if called before the queued
+// App::Initialize has run, _app may not be ready; confirm callers' timing.
+bool WgcCapturer::StartCapturerMonitor(HMONITOR monitor, int width, int height)
+{
+    return _app->StartCaptureMonitor(monitor, width, height);
+}
+
+// Begin capturing a single window at the requested output size.
+// NOTE(review): same initialization-race caveat as StartCapturerMonitor.
+bool WgcCapturer::StartCapturerWindow(HWND hwnd, int width, int height)
+{
+    return _app->StartCaptureWindow(hwnd, width, height);
+}

+ 35 - 0
AvRecorder/capturer/video/wgc_capturer.h

@@ -0,0 +1,35 @@
+#ifndef __WGC_CAPTURER_H__
+#define __WGC_CAPTURER_H__
+
+#include "wgc/pch.h"
+
+#include "wgc/App.h"
+#include <list>
+
+// Facade over the WGC App: owns one App instance and, via the static members,
+// the process-wide dispatcher queue / composition root shared by all
+// capturers (set up in Init(), torn down in Uninit()).
+class WgcCapturer {
+public:
+    bool StartCapturerWindow(HWND hwnd, int width, int height);
+    bool StartCapturerMonitor(HMONITOR monitor, int width, int height);
+    void SetDrawCursor(bool isDrawCursor) { _app->SetDrawCursor(isDrawCursor); }
+    // One-time process setup; idempotent.
+    static void Init();
+    // Obtain a capturer instance from the static pool.
+    static WgcCapturer* New();
+    // Currently a no-op; pooled instances are reclaimed by Uninit().
+    static void Delete(WgcCapturer* ptr);
+    static void Uninit();
+    void Close()
+    {
+        if (_app != nullptr) {
+            _app->Close();
+        }
+    }
+    // Latest captured frame, forwarded from the underlying App.
+    AVFrame* GetFrame() { return _app->GetFrame(); }
+
+private:
+    // Construction/destruction only through the static pool management.
+    WgcCapturer();
+    ~WgcCapturer();
+    App* _app = nullptr;
+    bool _isAppInit = false;  // set once App::Initialize ran on the queue
+    static std::list<WgcCapturer*> _capturers;  // pooled instances
+    static winrt::Windows::System::DispatcherQueue* queuePtr;
+    static winrt::Windows::UI::Composition::ContainerVisual* rootPtr;
+};
+#endif

+ 60 - 0
AvRecorder/d3d/buffer_filler.cpp

@@ -0,0 +1,60 @@
+#include "buffer_filler.h"
+#include "basic/basic.h"
+
+// Ensure the staging-texture ring matches `desc`, creating up to `maxCnt`
+// CPU-readable copies. Called once per frame: in steady state it recreates
+// the slot that Reset() released (possibly at a new size).
+// NOTE(review): `_buffers.size()` (size_t) is compared against the signed
+// `maxCnt` — fine for positive counts, but worth a cast for -Wsign-compare.
+bool BufferFiller::Fill(ID3D11Device* device, D3D11_TEXTURE2D_DESC desc, int maxCnt)
+{
+    // Set the common staging-texture attributes
+    desc.ArraySize = 1;
+    desc.BindFlags = 0;
+    desc.MiscFlags = 0;
+    desc.SampleDesc.Count = 1;
+    desc.SampleDesc.Quality = 0;
+    desc.MipLevels = 1;
+    desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
+    desc.Usage = D3D11_USAGE_STAGING;
+
+    // If the maximum buffer count is reached, replace the existing buffer
+    if (_buffers.size() == maxCnt) {
+        ID3D11Texture2D* dstImg = nullptr;
+        if (FAILED(device->CreateTexture2D(&desc, nullptr, &dstImg))) {
+            return false;
+        }
+        // Release the old buffer (may already be null after Reset()) and
+        // install the replacement, then advance the map index.
+        if (_buffers[_mapIdx]) {
+            _buffers[_mapIdx]->Release();
+        }
+        _buffers[_mapIdx] = dstImg;
+        _mapIdx = (_mapIdx + 1) % _buffers.size();
+        return true;
+    }
+
+    // Otherwise create new buffers until the maximum count is reached
+    while (_buffers.size() < maxCnt) {
+        ID3D11Texture2D* dstImg = nullptr;
+        if (FAILED(device->CreateTexture2D(&desc, nullptr, &dstImg))) {
+            break;
+        }
+        _buffers.push_back(dstImg);
+    }
+
+    // At least one buffer must exist; start the ring with copy slot 0 and the
+    // map slot one behind it.
+    __CheckBool(!_buffers.empty());
+    _copyIdx = 0;
+    _mapIdx = (_copyIdx + 1) % _buffers.size();
+    return true;
+}
+
+// Release the slot that was just mapped (the next Fill() recreates it, which
+// also absorbs size changes) and advance the copy index to rotate the ring.
+bool BufferFiller::Reset()
+{
+    _buffers[_mapIdx]->Release();
+    _buffers[_mapIdx] = nullptr;
+    _copyIdx = (_copyIdx + 1) % _buffers.size();
+    return true;
+}
+
+// Release every staging texture and empty the ring.
+// Free() comes from basic.h — presumably it runs the release lambda only for
+// non-null pointers (slots may be null after Reset()); confirm.
+void BufferFiller::Clear()
+{
+    for (auto&& dstImg : _buffers) {
+        Free(dstImg, [&dstImg] { dstImg->Release(); });
+    }
+    _buffers.clear();
+}

+ 24 - 0
AvRecorder/d3d/buffer_filler.h

@@ -0,0 +1,24 @@
+#ifndef __BUFFER_FILLER_H__
+#define __BUFFER_FILLER_H__
+#include <d3d11.h>
+#include <vector>
+
+// Ring of CPU-readable (staging) D3D11 textures used to read GPU frames back
+// to the CPU: the GPU copies into the "copy" slot while the CPU maps the
+// "map" slot, avoiding a full pipeline stall.
+class BufferFiller {
+public:
+    // (Re)create staging textures matching `desc`; keeps at most `maxCnt`.
+    bool Fill(ID3D11Device* device, D3D11_TEXTURE2D_DESC desc, int maxCnt = 3);
+    // Drop the just-mapped slot so the next Fill() recreates it.
+    bool Reset();
+    // Texture the GPU should CopyResource the new frame into.
+    ID3D11Texture2D* GetCopy() { return _buffers[_copyIdx]; }
+    // Texture the CPU should Map (a previously filled slot).
+    ID3D11Texture2D* GetMap() { return _buffers[_mapIdx]; }
+    void Clear();
+    ~BufferFiller()
+    {
+        Clear();
+    }
+
+private:
+    int _mapIdx = 0;   // slot handed out by GetMap()
+    int _copyIdx = 0;  // slot handed out by GetCopy()
+    std::vector<ID3D11Texture2D*> _buffers;
+};
+
+#endif

+ 131 - 0
AvRecorder/d3d/convert.cpp

@@ -0,0 +1,131 @@
+
+#include "convert.h"
+using namespace std;
+
+#if !defined(SAFE_RELEASE)
+#define SAFE_RELEASE(X) \
+    if (X) {            \
+        X->Release();   \
+        X = nullptr;    \
+    }
+#endif
+
+#if !defined(PRINTERR1)
+#define PRINTERR1(x) printf(__FUNCTION__ ": Error 0x%08x at line %d in file %s\n", x, __LINE__, __FILE__);
+#endif
+
+#if !defined(PRINTERR)
+#define PRINTERR(x, y) printf(__FUNCTION__ ": Error 0x%08x in %s at line %d in file %s\n", x, y, __LINE__, __FILE__);
+#endif
+
+/// Initialize Video Context
+/// Stores device/context (AddRef'd; released in Close) and obtains the video
+/// device / video context interfaces required for VideoProcessorBlt.
+/// Fixed: a failed first QueryInterface was previously ignored and execution
+/// continued (later null-pointer use); now each failure returns immediately.
+HRESULT D3dConverter::Open(ID3D11Device* pDev, ID3D11DeviceContext* pCtx,
+    const D3D11_VIDEO_PROCESSOR_COLOR_SPACE& inColorSpace, D3D11_VIDEO_PROCESSOR_COLOR_SPACE& outColorSpace)
+{
+    m_pDev = pDev;
+    m_pCtx = pCtx;
+    m_pDev->AddRef();
+    m_pCtx->AddRef();
+    /// Obtain Video device and Video device context
+    HRESULT hr = m_pDev->QueryInterface(__uuidof(ID3D11VideoDevice), (void**)&m_pVid);
+    if (FAILED(hr)) {
+        PRINTERR(hr, "QAI for ID3D11VideoDevice");
+        return hr;
+    }
+    hr = m_pCtx->QueryInterface(__uuidof(ID3D11VideoContext), (void**)&m_pVidCtx);
+    if (FAILED(hr)) {
+        PRINTERR(hr, "QAI for ID3D11VideoContext");
+        return hr;
+    }
+    _inColorSpace = inColorSpace;
+    _outColorSpace = outColorSpace;
+    return hr;
+}
+
+/// Release all Resources
+/// Fixed: viewMap entries were released but the map was never cleared, so a
+/// second Close() (e.g. explicit Close then destructor) double-Released them.
+void D3dConverter::Close()
+{
+    for (auto& it : viewMap) {
+        ID3D11VideoProcessorOutputView* pVPOV = it.second;
+        pVPOV->Release();
+    }
+    viewMap.clear();
+    SAFE_RELEASE(m_pVP);
+    SAFE_RELEASE(m_pVPEnum);
+    SAFE_RELEASE(m_pVidCtx);
+    SAFE_RELEASE(m_pVid);
+    SAFE_RELEASE(m_pCtx);
+    SAFE_RELEASE(m_pDev);
+}
+
+/// Perform Colorspace conversion
+/// Fixes: (1) pVPOV leaked when VideoProcessorBlt failed; both views are now
+/// released on every path. (2) Enumerator/processor creation failures
+/// previously fell through with null pointers; they now return early.
+HRESULT D3dConverter::Convert(ID3D11Texture2D* pIn, ID3D11Texture2D* pOut)
+{
+    HRESULT hr = S_OK;
+
+    D3D11_TEXTURE2D_DESC inDesc = {0};
+    D3D11_TEXTURE2D_DESC outDesc = {0};
+    pIn->GetDesc(&inDesc);
+    pOut->GetDesc(&outDesc);
+
+    /// Check if VideoProcessor needs to be reconfigured
+    /// Reconfiguration is required if input/output dimensions have changed
+    if (m_pVP) {
+        if (m_inDesc.Width != inDesc.Width || m_inDesc.Height != inDesc.Height || m_outDesc.Width != outDesc.Width || m_outDesc.Height != outDesc.Height) {
+            SAFE_RELEASE(m_pVPEnum);
+            SAFE_RELEASE(m_pVP);
+        }
+    }
+
+    if (!m_pVP) {
+        /// Initialize Video Processor
+        m_inDesc = inDesc;
+        m_outDesc = outDesc;
+        D3D11_VIDEO_PROCESSOR_CONTENT_DESC contentDesc = {
+            D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE,
+            {0, 0}, inDesc.Width, inDesc.Height,
+            {0, 0}, outDesc.Width, outDesc.Height,
+            D3D11_VIDEO_USAGE_PLAYBACK_NORMAL};
+        hr = m_pVid->CreateVideoProcessorEnumerator(&contentDesc, &m_pVPEnum);
+        if (FAILED(hr)) {
+            PRINTERR(hr, "CreateVideoProcessorEnumerator");
+            return hr;
+        }
+        hr = m_pVid->CreateVideoProcessor(m_pVPEnum, 0, &m_pVP);
+        if (FAILED(hr)) {
+            PRINTERR(hr, "CreateVideoProcessor");
+            return hr;
+        }
+
+        m_pVidCtx->VideoProcessorSetStreamColorSpace(m_pVP, 0, &_inColorSpace);
+        m_pVidCtx->VideoProcessorSetOutputColorSpace(m_pVP, &_outColorSpace);
+    }
+
+    /// Obtain Video Processor Input view from input texture
+    ID3D11VideoProcessorInputView* pVPIn = nullptr;
+    D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC inputVD = {0, D3D11_VPIV_DIMENSION_TEXTURE2D, {0, 0}};
+    hr = m_pVid->CreateVideoProcessorInputView(pIn, m_pVPEnum, &inputVD, &pVPIn);
+    if (FAILED(hr)) {
+        PRINTERR(hr, "CreateVideoProcessInputView");
+        return hr;
+    }
+
+    /// Obtain Video Processor Output view from output texture
+    ID3D11VideoProcessorOutputView* pVPOV = nullptr;
+    D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC ovD = {D3D11_VPOV_DIMENSION_TEXTURE2D};
+    hr = m_pVid->CreateVideoProcessorOutputView(pOut, m_pVPEnum, &ovD, &pVPOV);
+    if (FAILED(hr)) {
+        SAFE_RELEASE(pVPIn);
+        PRINTERR(hr, "CreateVideoProcessorOutputView");
+        return hr;
+    }
+
+    /// Create a Video Processor Stream to run the operation
+    D3D11_VIDEO_PROCESSOR_STREAM stream = {TRUE, 0, 0, 0, 0, nullptr, pVPIn, nullptr};
+
+    /// Perform the Colorspace conversion
+    hr = m_pVidCtx->VideoProcessorBlt(m_pVP, pVPOV, 0, 1, &stream);
+    if (FAILED(hr)) {
+        PRINTERR(hr, "VideoProcessorBlt");
+    }
+    SAFE_RELEASE(pVPIn);
+    SAFE_RELEASE(pVPOV);
+    return hr;
+}

+ 80 - 0
AvRecorder/d3d/convert.h

@@ -0,0 +1,80 @@
+/*
+ * Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *  * Neither the name of NVIDIA CORPORATION nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+#include <dxgi1_2.h>
+#include <d3d11_2.h>
+#include <unordered_map>
+
+class D3dConverter {
+    /// Simple Preprocessor class
+    /// Uses DXVAHD VideoProcessBlt to perform colorspace conversion
+private:
+    /// D3D11 device to be used for Processing
+    ID3D11Device* m_pDev = nullptr;
+    /// D3D11 device context to be used for Processing
+    ID3D11DeviceContext* m_pCtx = nullptr;
+    /// D3D11 video device to be used for Processing, obtained from d3d11 device
+    ID3D11VideoDevice* m_pVid = nullptr;
+    /// D3D11 video device context to be used for Processing, obtained from d3d11 device
+    ID3D11VideoContext* m_pVidCtx = nullptr;
+    /// DXVAHD video processor configured for processing.
+    /// Needs to be reconfigured based on input and output textures for each Convert() call
+    ID3D11VideoProcessor* m_pVP = nullptr;
+    /// DXVAHD VpBlt output target. Obtained from the output texture passed to Convert()
+    /// NOTE(review): never assigned in the visible implementation — candidate
+    /// for removal; confirm no other translation unit uses it.
+    ID3D11VideoProcessorOutputView* m_pVPOut = nullptr;
+    /// D3D11 video processor enumerator. Required to configure Video processor streams
+    ID3D11VideoProcessorEnumerator* m_pVPEnum = nullptr;
+    /// Mapping of Texture2D handle and corresponding Video Processor output view handle
+    /// Optimization to avoid having to create video processor output views in each Convert() call
+    /// NOTE(review): released in Close() but never populated in the visible
+    /// Convert() — the caching optimization appears unimplemented; confirm.
+    std::unordered_map<ID3D11Texture2D*, ID3D11VideoProcessorOutputView*> viewMap;
+    /// Input and Output Texture2D properties.
+    /// Required to optimize Video Processor stream usage
+    D3D11_TEXTURE2D_DESC m_inDesc = {0};
+    D3D11_TEXTURE2D_DESC m_outDesc = {0};
+    /// Color spaces captured by Open() and applied when the processor is (re)built
+    D3D11_VIDEO_PROCESSOR_COLOR_SPACE _inColorSpace;
+    D3D11_VIDEO_PROCESSOR_COLOR_SPACE _outColorSpace;
+
+public:
+    /// Initialize Video Context
+    HRESULT Open(ID3D11Device* pDev, ID3D11DeviceContext* pCtx,
+        const D3D11_VIDEO_PROCESSOR_COLOR_SPACE& inColorSpace, D3D11_VIDEO_PROCESSOR_COLOR_SPACE& outColorSpace);
+    /// Perform Colorspace conversion
+    HRESULT Convert(ID3D11Texture2D* pIn, ID3D11Texture2D* pOut);
+    /// Release all resources
+    void Close();
+
+public:
+    /// Constructor
+    // RGBToNV12(ID3D11Device *pDev, ID3D11DeviceContext *pCtx);
+    /// Destructor. Release all resources before destroying object
+    ~D3dConverter()
+    {
+        Close();
+    }
+};

+ 81 - 0
AvRecorder/d3d/gen_frame.cpp

@@ -0,0 +1,81 @@
+#include "gen_frame.h"
+
+#include <winrt/base.h>
+
+#undef min
+#undef max
+
+// Convert a captured BGRA texture to NV12 on the GPU (rgbToNv12) and copy the
+// result into outFrame through the staging-buffer ring. Returns false if any
+// D3D call fails (caller then falls back to the RGB path).
+// NOTE(review): the offset math assumes the captured texture can exceed
+// outFrame by a title bar (titleHeight) and symmetric borders (border), and
+// that (titleHeight - border) is non-negative — confirm against the capture
+// path's sizes.
+bool GenNv12Frame(ID3D11Device* device,
+                  ID3D11DeviceContext* ctx,
+                  const D3D11_TEXTURE2D_DESC& desc,
+                  ID3D11Texture2D* img,
+                  BufferFiller& buffers,
+                  AVFrame*& outFrame,
+                  D3dConverter& rgbToNv12)
+{
+    winrt::com_ptr<ID3D11Texture2D> nv12Img = nullptr;
+    if (FAILED(device->CreateTexture2D(&desc, nullptr, nv12Img.put()))) {
+        return false;
+    }
+    __CheckBool(SUCCEEDED(rgbToNv12.Convert(img, nv12Img.get())));
+    // Fill the staging buffers
+    __CheckBool(buffers.Fill(device, desc));
+
+    ctx->CopyResource(buffers.GetCopy(), nv12Img.get());
+    D3D11_MAPPED_SUBRESOURCE resource;
+    __CheckBool(SUCCEEDED(ctx->Map(buffers.GetMap(), 0, D3D11_MAP_READ, 0, &resource)));
+    auto height = std::min(outFrame->height, (int)desc.Height);
+    auto width = outFrame->width;
+    auto srcLinesize = resource.RowPitch;
+    auto dstLinesize = outFrame->linesize[0];
+    auto srcData = (uint8_t*)resource.pData;
+    auto titleHeight = std::max(int(desc.Height - height), 0);
+    /* auto copyLine = std::min(std::min(width, (int) srcLinesize), dstLinesize);*/
+    auto border = (desc.Width - width) / 2;
+    // __mtx (basic.h) presumably serializes outFrame access with the encoder
+    // thread — confirm.
+    __mtx.lock();
+
+    // Y plane: one byte per pixel, cropped by title bar and borders
+    int Ystart = (titleHeight - border) * srcLinesize + border;
+    auto dstData = outFrame->data[0];
+    for (int row = 0; row < height; ++row) {
+        memcpy(dstData + row * dstLinesize, srcData + Ystart + row * srcLinesize, width);
+    }
+
+    // UV plane: interleaved, half height; starts after the full-height Y area
+    dstData = outFrame->data[1];
+    int UVStart = srcLinesize * desc.Height + (titleHeight - border) / 2 * srcLinesize + border / 2 * 2;
+    for (int row = 0; row < height / 2; ++row) {
+        memcpy(dstData + row * dstLinesize, srcData + UVStart + row * srcLinesize, width);
+    }
+
+    __mtx.unlock();
+    ctx->Unmap(buffers.GetMap(), 0);
+    __CheckBool(buffers.Reset());
+    return true;
+}
+// Copy a captured BGRA texture into outFrame (4 bytes/pixel) through the
+// staging-buffer ring; fallback path when NV12 conversion is unavailable.
+// NOTE(review): same title-bar/border cropping assumptions as GenNv12Frame.
+bool GenRgbFrame(ID3D11Device* device, ID3D11DeviceContext* ctx, const D3D11_TEXTURE2D_DESC& desc,
+    ID3D11Texture2D* img, BufferFiller& buffers, AVFrame*& outFrame)
+{
+    __CheckBool(buffers.Fill(device, desc));
+    ctx->CopyResource(buffers.GetCopy(), img);
+    D3D11_MAPPED_SUBRESOURCE resource;
+    __CheckBool(SUCCEEDED(ctx->Map(buffers.GetMap(), 0, D3D11_MAP_READ, 0, &resource)));
+    auto height = std::min(outFrame->height, (int)desc.Height);
+    auto width = outFrame->width;
+    auto srcLinesize = resource.RowPitch;
+    auto dstLinesize = outFrame->linesize[0];
+    auto srcData = (uint8_t*)resource.pData;
+    auto dstData = outFrame->data[0];
+    auto titleHeight = std::max(int(desc.Height - height), 0);
+    // Clamp the per-row copy to the smallest of source pitch, destination
+    // pitch, and the visible width in bytes (4 bytes per BGRA pixel).
+    auto copyLine = std::min(std::min(width * 4, (int)srcLinesize), dstLinesize);
+    auto border = (desc.Width - width) / 2;
+    __mtx.lock();
+    for (int row = 0; row < height; ++row) {
+        auto offset = (titleHeight + row - border) * srcLinesize + border * 4;
+        memcpy(dstData + row * dstLinesize, srcData + offset, copyLine);
+    }
+    __mtx.unlock();
+    ctx->Unmap(buffers.GetMap(), 0);
+    __CheckBool(buffers.Reset());
+    return true;
+}

+ 14 - 0
AvRecorder/d3d/gen_frame.h

@@ -0,0 +1,14 @@
+#ifndef __GEN_FRAME_H__
+#define __GEN_FRAME_H__
+
+#include "buffer_filler.h"
+#include "convert.h"
+#include <d3d11.h>
+
+#include "basic/frame.h"
+
+// Converts a captured D3D11 texture into an NV12 AVFrame, using `rgbToNv12`
+// for the pixel-format conversion. Returns false on any D3D/map failure.
+bool GenNv12Frame(ID3D11Device* device, ID3D11DeviceContext* ctx, const D3D11_TEXTURE2D_DESC& desc,
+    ID3D11Texture2D* img, BufferFiller& buffers, AVFrame*& outFrame, D3dConverter& rgbToNv12);
+// Copies a captured BGRA D3D11 texture into a pre-allocated RGB AVFrame.
+bool GenRgbFrame(ID3D11Device* device, ID3D11DeviceContext* ctx, const D3D11_TEXTURE2D_DESC& desc,
+    ID3D11Texture2D* img, BufferFiller& buffers, AVFrame*& outFrame);
+#endif

+ 14 - 0
AvRecorder/encoder/abstract_encoder.cpp

@@ -0,0 +1,14 @@
+
+#include "abstract_encoder.h"
+
+AVPacket* AbstractEncoder::Encode()
+{
+    int ret = avcodec_receive_packet(_codecCtx, _packet);
+    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
+        return nullptr;
+    } else if (ret < 0) {
+        __DebugPrint("avcodec_receive_packet : Error during encoding");
+        return nullptr;
+    }
+    return _packet;
+}

+ 31 - 0
AvRecorder/encoder/abstract_encoder.h

@@ -0,0 +1,31 @@
+#ifndef __BASIC_ENCODER_H__
+#define __BASIC_ENCODER_H__
+
+#include "basic/basic.h"
+
+// Common base for the audio/video encoders: owns the reusable output
+// AVPacket plus the codec/context handles; subclasses implement
+// PushFrame()/Close().
+// NOTE(review): the include guard says __BASIC_ENCODER_H__ although the
+// file is abstract_encoder.h — harmless, but worth renaming.
+class AbstractEncoder
+{
+public:
+    AbstractEncoder() { _packet = av_packet_alloc(); }
+    AVCodecContext* GetCtx() const { return _codecCtx; }
+
+    // Submit one frame (or flush with isEnd=true); pts is in codec time_base units.
+    virtual bool PushFrame(AVFrame* frame, bool isEnd, uint64_t pts) = 0;
+    // Returns the next ready packet, or nullptr (needs input / drained / error).
+    AVPacket* Encode();
+    // Hook run after each encode loop; default does nothing.
+    virtual void AfterEncode() {};
+    virtual void Close() = 0;
+    virtual ~AbstractEncoder()
+    {
+        Free(_packet, [this] { av_packet_free(&_packet); });
+    }
+
+protected:
+    bool _isOpen = false;
+    AVPacket* _packet = nullptr;        // reused output packet
+    const AVCodec* _codec = nullptr;
+    AVCodecContext* _codecCtx = nullptr;
+};
+
+// Specialized per media type (see audio_encoder.h / video_encoder.h).
+template<MediaType mediaType>
+class Encoder;
+
+#endif

+ 52 - 0
AvRecorder/encoder/audio_encoder.cpp

@@ -0,0 +1,52 @@
+
+#include "audio_encoder.h"
+
+// Configures and opens the AAC encoder. Any previously opened context is
+// released first; _isOpen only becomes true after avcodec_open2 succeeds.
+bool Encoder<MediaType::AUDIO>::Open(const Param& audioParma, AVFormatContext* fmtCtx)
+{
+    Close();
+    _isOpen = false;
+    __CheckBool(_Init(audioParma, fmtCtx));
+    __CheckBool(avcodec_open2(_codecCtx, _codec, nullptr) >= 0);
+    _isOpen = true;
+    return true;
+}
+void Encoder<MediaType::AUDIO>::Close()
+{
+    if (_codecCtx != nullptr) {
+        avcodec_free_context(&_codecCtx);
+    }
+    Free(_codecCtx, [this] { avcodec_free_context(&_codecCtx); });
+}
+bool Encoder<MediaType::AUDIO>::_Init(const Param& audioParam, AVFormatContext* fmtCtx)
+{
+    // codec
+    __CheckBool(_codec = avcodec_find_encoder(AV_CODEC_ID_AAC));
+    // codeccontext
+    __CheckBool(_codecCtx = avcodec_alloc_context3(_codec));
+    _codecCtx->sample_fmt = AV_SAMPLE_FMT_FLTP;
+    _codecCtx->bit_rate = audioParam.bitRate;
+    _codecCtx->sample_rate = AUDIO_SAMPLE_RATE;
+    AVChannelLayout layout;
+    layout.order = AV_CHANNEL_ORDER_NATIVE;
+    layout.nb_channels = 1;
+    layout.u.mask = AV_CH_LAYOUT_MONO;
+    av_channel_layout_copy(&_codecCtx->ch_layout, &layout);
+    if (fmtCtx->oformat->flags & AVFMT_GLOBALHEADER) {
+        _codecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
+    }
+    return true;
+}
+
+bool Encoder<MediaType::AUDIO>::PushFrame(AVFrame* frame, bool isEnd, uint64_t pts)
+{
+    if (!isEnd) {
+        __CheckBool(frame);
+    } else {
+        frame = nullptr;
+    }
+    if (frame != nullptr) {
+        frame->pts = pts;
+    }
+    __CheckBool(avcodec_send_frame(_codecCtx, frame) >= 0);
+    return true;
+}

+ 24 - 0
AvRecorder/encoder/audio_encoder.h

@@ -0,0 +1,24 @@
+#ifndef __AUDIO_ENCODER_H__
+#define __AUDIO_ENCODER_H__
+
+#include "abstract_encoder.h"
+
+// AAC audio encoder (mono, AUDIO_SAMPLE_RATE, FLTP input) built on
+// AbstractEncoder.
+template<>
+class Encoder<MediaType::AUDIO> : public AbstractEncoder
+{
+public:
+    struct Param
+    {
+        int bitRate; // target bitrate, bits per second
+    };
+    ~Encoder() { Close(); }
+
+    bool Open(const Param& audioParma, AVFormatContext* fmtCtx);
+    virtual void Close() override;
+    virtual bool PushFrame(AVFrame* frame, bool isEnd, uint64_t pts) override;
+
+private:
+    // Allocates/configures the codec context; called from Open().
+    bool _Init(const Param& audioParam, AVFormatContext* fmtCtx);
+};
+
+#endif

+ 330 - 0
AvRecorder/encoder/audio_mixer.cpp

@@ -0,0 +1,330 @@
+#include "audio_mixer.h"
+
+#include "basic/basic.h"
+#include "basic/frame.h"
+
+#include <windows.h>
+
+AVSampleFormat BitsToFmt(int bits)
+{
+    switch (bits) {
+    case 8:
+        return AV_SAMPLE_FMT_U8;
+    case 16:
+        return AV_SAMPLE_FMT_S16;
+    case 32:
+        return AV_SAMPLE_FMT_S32;
+    case 64:
+        return AV_SAMPLE_FMT_S64;
+    default:
+        return AV_SAMPLE_FMT_FLT;
+    }
+}
+
+int FmtToBits(AVSampleFormat fmt)
+{
+    switch (fmt) {
+    case AV_SAMPLE_FMT_U8:
+        return 8;
+    case AV_SAMPLE_FMT_S16:
+        return 16;
+    case AV_SAMPLE_FMT_S32:
+        return 32;
+    case AV_SAMPLE_FMT_S64:
+        return 64;
+    default:
+        return 32;
+    }
+}
+
// Converts a byte count of interleaved PCM into a per-channel sample count.
int SizeToNbSamples(int size, int bitsPerSample, int nbChannels)
{
    const int totalBits = size * 8;
    return totalBits / bitsPerSample / nbChannels;
}
+
// Converts a per-channel sample count into the byte size of interleaved PCM.
int NbSamplesToSize(int nbSamples, int bitsPerSample, int nbChannels)
{
    const int totalBits = nbSamples * bitsPerSample * nbChannels;
    return totalBits / 8;
}
+
+bool FrameQueue::Init(int channelNums, int sampleRate, AVSampleFormat fmt, int nbSamples)
+{
+    _front = 0;
+    _sampleRate = sampleRate;
+    _fmt = fmt;
+    _nbSamples = nbSamples;
+    _usedLinesize = nbSamples * channelNums * (fmt == AV_SAMPLE_FMT_S16 ? 2 : 4);
+    av_channel_layout_default(&_layout, channelNums);
+    _queue.emplace(fmt, &_layout, sampleRate, nbSamples);
+    return true;
+}
+
+Frame<MediaType::AUDIO> FrameQueue::Pop()
+{
+    if (_queue.size() > 1) {
+        auto frame = std::move(_queue.front());
+        _queue.pop();
+        return frame;
+    }
+    return Frame<MediaType::AUDIO>();
+}
+
+// Appends raw interleaved PCM bytes to the tail frame, starting a new frame
+// whenever the current one fills up. `_front` is the write offset (bytes)
+// inside the tail frame.
+void FrameQueue::Push(uint8_t* data, int length)
+{
+    if (length > _usedLinesize) { // too big for one frame: recurse on halves
+        Push(data, length / 2);
+        Push(data + length / 2, length / 2 + length % 2);
+        return;
+    }
+    auto&& frame = _queue.back().frame;
+    int secondLength = _front + length - _usedLinesize;
+    if (secondLength <= 0) { // the tail frame has enough room left
+        memcpy(frame->data[0] + _front, data, length);
+        _front += length;
+        return;
+    }
+    // Not enough room: split the copy across the tail frame ...
+    int firstLength = length - secondLength;
+    if (firstLength > 0) {
+        memcpy(frame->data[0] + _front, data, firstLength);
+    }
+    // ... and a freshly allocated frame, which becomes the new tail.
+    _queue.emplace(_fmt, &_layout, _sampleRate, _nbSamples);
+    memcpy(_queue.back().frame->data[0], data + firstLength, secondLength);
+    _front = secondLength;
+}
+
+// Configures resampling from (inChannelNums, inSampleRate, inFmt) to
+// (outChannelNums, outSampleRate, outFmt). The input queue buffers 20 ms
+// frames (rate / 100 * 2 samples); the output queue uses outNbSample.
+bool Resampler::Open(int inChannelNums,
+                     int inSampleRate,
+                     AVSampleFormat inFmt,
+                     int outChannelNums,
+                     int outSampleRate,
+                     AVSampleFormat outFmt,
+                     int outNbSample)
+{
+    Close();
+    __CheckBool(_swrCtx = swr_alloc());
+
+    AVChannelLayout tmpLayout;
+    av_channel_layout_default(&tmpLayout, inChannelNums);
+    av_opt_set_chlayout(_swrCtx, "in_chlayout", &tmpLayout, 0);
+    av_opt_set_int(_swrCtx, "in_sample_rate", inSampleRate, 0);
+    av_opt_set_sample_fmt(_swrCtx, "in_sample_fmt", inFmt, 0);
+    __CheckBool(_fromQueue.Init(inChannelNums, inSampleRate, inFmt, inSampleRate / 100 * 2));
+
+    av_channel_layout_default(&tmpLayout, outChannelNums);
+    av_opt_set_chlayout(_swrCtx, "out_chlayout", &tmpLayout, 0);
+    av_opt_set_int(_swrCtx, "out_sample_rate", outSampleRate, 0);
+    av_opt_set_sample_fmt(_swrCtx, "out_sample_fmt", outFmt, 0);
+    if (swr_init(_swrCtx) < 0) {
+        Close();
+        __DebugPrint("swr_init(_swrCtx) failed\n");
+        return false;
+    }
+    __CheckBool(_toQueue.Init(outChannelNums, outSampleRate, outFmt, outNbSample));
+    // Scratch frame that receives each swr_convert() burst (20 ms of output).
+    __CheckBool(_swrFrame = Frame<MediaType::AUDIO>::Alloc(outFmt,
+                                                           &tmpLayout,
+                                                           outSampleRate,
+                                                           outSampleRate / 100 * 2));
+
+    return true;
+}
+
+void Resampler::Close()
+{
+    Free(_swrCtx, [this] { swr_free(&_swrCtx); });
+    Free(_swrFrame, [this] { av_frame_free(&_swrFrame); });
+}
+
+bool Resampler::Convert(uint8_t* data, int size)
+{
+    std::vector<Frame<MediaType::AUDIO>> ret;
+    if (data == nullptr) {
+        return false;
+    }
+    _fromQueue.Push(data, size);
+    for (; true;) { // 转换
+        auto frame = _fromQueue.Pop();
+        if (frame.frame == nullptr) {
+            break;
+        }
+        __CheckBool(swr_convert(_swrCtx,
+                                _swrFrame->data,
+                                _swrFrame->nb_samples,
+                                (const uint8_t**) frame.frame->data,
+                                frame.frame->nb_samples)
+                    > 0);
+        _toQueue.Push(_swrFrame->data[0], _swrFrame->linesize[0]);
+    }
+    return true;
+}
+
+AVFrame* AudioMixer::Convert(uint32_t index, uint8_t* inBuf, uint32_t size)
+{
+    std::lock_guard<std::mutex> locker(_mutex);
+    auto iter = _audioInputInfos.find(index);
+    __CheckNullptr(iter != _audioInputInfos.end());
+    __CheckNullptr(iter->second.resampler->Convert(inBuf, size));
+    return _AdjustVolume() ? _outputFrame : nullptr;
+}
+
+// Mixes one frame from every input queue into _outputFrame (FLT samples).
+// Returns false when some input has no completed frame yet.
+bool AudioMixer::_AdjustVolume()
+{
+    // Drift/backlog control: if the buffers are too full, drop data from
+    // every queue; if one stream is far ahead of another, trim the long
+    // one down to the short one.
+    constexpr int MAX_DIFF = 10;
+    constexpr int MAX_BUF_SIZE = 20;
+    int minSize = INT_MAX;
+    int maxSize = INT_MIN;
+    FrameQueue* maxQueue = nullptr;
+#undef min
+    for (auto&& iter : _audioInputInfos) {
+        auto&& queue = iter.second.resampler->GetQueue();
+        if (queue.IsEmpty()) {
+            return false; // one input has nothing ready: cannot mix yet
+        }
+        minSize = std::min(minSize, (int) queue.GetSize());
+        if (maxSize < (int) queue.GetSize()) {
+            maxSize = (int) queue.GetSize();
+            maxQueue = &queue;
+        }
+    }
+
+    if (maxSize - minSize > MAX_DIFF) {
+        // __DebugPrint("Clear MAX_DIFF");
+        for (int i = 0; i < maxSize - minSize; ++i) {
+            maxQueue->Pop();
+        }
+    }
+
+    for (auto iter = _audioInputInfos.begin(); iter != _audioInputInfos.end(); ++iter) {
+        auto&& frameQueue = iter->second.resampler->GetQueue();
+        if (minSize > MAX_BUF_SIZE) {
+            // __DebugPrint("Clear MAX_BUF_SIZE");
+            for (int i = 0; i < minSize - 2; ++i) {
+                frameQueue.Pop();
+            }
+        }
+        auto frame = frameQueue.Pop();
+        auto scale = iter->second.scale;
+        auto writeStream = (float*) (_outputFrame->data[0]);
+        auto readStream = (float*) (frame.frame->data[0]);
+        // Remember the scaled first sample as a crude per-input level meter.
+        iter->second.volume = readStream[0] * scale;
+
+        if (iter == _audioInputInfos.begin()) {
+            if (std::abs(scale - 1)
+                < 0.01) { // scale is ~1: a memcpy beats per-sample scaling
+                memcpy(writeStream, readStream, _outputFrame->linesize[0]);
+                continue;
+            }
+            // Scaling requires per-sample writes, so clear the buffer first.
+            memset(writeStream, 0, _outputFrame->linesize[0]);
+        }
+        // Accumulate sample by sample, clamping to avoid clipping.
+        for (int idx = 0; idx < _outputFrame->nb_samples; ++idx) {
+            writeStream[idx] += readStream[idx] * scale;
+            if (writeStream[idx] > 0.99) {
+                writeStream[idx] = 0.99f;
+            }
+        }
+    }
+    return true;
+}
+
+AudioMixer::AudioMixer()
+    : _inited(false)
+{}
+
+AudioMixer::~AudioMixer()
+{
+    // delete out_buf;
+    if (_inited) {
+        Close();
+    }
+}
+
+bool AudioMixer::AddAudioInput(uint32_t index,
+                               uint32_t sampleRate,
+                               uint32_t channels,
+                               uint32_t bitsPerSample,
+                               AVSampleFormat format)
+{
+    std::lock_guard<std::mutex> locker(_mutex);
+    __CheckBool(!_inited);
+    // 根据index保存是否已经存在
+    __CheckBool(_audioInputInfos.find(index) == _audioInputInfos.end());
+
+    auto& filterInfo = _audioInputInfos[index];
+    // 设置音频相关参数
+    filterInfo.sampleRate = sampleRate;
+    filterInfo.channels = channels;
+    filterInfo.bitsPerSample = bitsPerSample;
+    filterInfo.format = format;
+    filterInfo.name = std::string("input") + std::to_string(index);
+    return true;
+}
+
+bool AudioMixer::AddAudioOutput(const uint32_t sampleRate,
+                                const uint32_t channels,
+                                const uint32_t bitsPerSample,
+                                const AVSampleFormat format)
+{
+    std::lock_guard<std::mutex> locker(_mutex);
+    __CheckBool(!_inited);
+    // 设置音频相关参数
+    _audioOutputInfo.sampleRate = sampleRate;
+    _audioOutputInfo.channels = channels;
+    _audioOutputInfo.bitsPerSample = bitsPerSample;
+    _audioOutputInfo.format = format;
+    _audioOutputInfo.name = "output";
+    return true;
+}
+
+// (Re)builds every input resampler and the mixed output frame for the given
+// per-frame sample count (typically the codec's frame_size). No-op when the
+// size is unchanged; marks the mixer initialized on success.
+bool AudioMixer::SetOutFrameSize(int outFrameSize)
+{
+    if (_outFrameSize == outFrameSize) {
+        return true;
+    }
+    _outFrameSize = outFrameSize;
+    for (auto&& filterInfoPair : _audioInputInfos) {
+        auto&& filterInfo = filterInfoPair.second;
+        filterInfo.resampler = std::make_unique<Resampler>();
+        __CheckBool(filterInfo.resampler->Open(filterInfo.channels,
+                                               filterInfo.sampleRate,
+                                               filterInfo.format,
+                                               _audioOutputInfo.channels,
+                                               _audioOutputInfo.sampleRate,
+                                               _audioOutputInfo.format,
+                                               outFrameSize));
+    }
+    AVChannelLayout tmpLayout;
+    av_channel_layout_default(&tmpLayout, _audioOutputInfo.channels);
+    Free(_outputFrame, [this] { av_frame_free(&_outputFrame); });
+    __CheckBool(_outputFrame = Frame<MediaType::AUDIO>::Alloc(_audioOutputInfo.format,
+                                                              &tmpLayout,
+                                                              _audioOutputInfo.sampleRate,
+                                                              outFrameSize));
+    _inited = true;
+    return true;
+}
+
+bool AudioMixer::Close()
+{
+    if (!_inited) {
+        return true;
+    }
+    _inited = false;
+    std::lock_guard<std::mutex> locker(_mutex);
+    _audioInputInfos.clear();
+    Free(_outputFrame, [this] { av_frame_free(&_outputFrame); });
+    _outFrameSize = 0;
+    return true;
+}
+
+AudioMixer::AudioInfo* AudioMixer::GetInputInfo(uint32_t index)
+{
+    auto iter = _audioInputInfos.find(index);
+    return iter == _audioInputInfos.end() ? nullptr : &(iter->second);
+}

+ 100 - 0
AvRecorder/encoder/audio_mixer.h

@@ -0,0 +1,100 @@
+#ifndef __AUDIO_MIXER_H__
+#define __AUDIO_MIXER_H__
+
+#include <cstdint>
+#include <memory>
+#include <mutex>
+#include <queue>
+#include <string>
+#include <unordered_map>
+
+extern "C" {
+#include <libavcodec/avcodec.h>
+#include <libavfilter/buffersink.h>
+#include <libavfilter/buffersrc.h>
+#include <libavformat/avformat.h>
+#include <libavutil/opt.h>
+#include <libswresample/swresample.h>
+}
+
+#include "basic/frame.h"
+
+#define __PCM1_FRAME_SIZE (4096 * 2)
+#define __PCM2_FRAME_SIZE (4096)
+#define __PCM_OUT_FRAME_SIZE (40000)
+
+// Chunked FIFO of fixed-size audio frames. Push() streams raw bytes in;
+// Pop() hands out completed frames. The tail frame currently being filled
+// never leaves the queue.
+class FrameQueue {
+public:
+    bool Init(int channelNums, int sampleRate, AVSampleFormat fmt, int nbSamples);
+    Frame<MediaType::AUDIO> Pop();
+    void Push(uint8_t* data, int length);
+    // "Empty" means no COMPLETED frame (the tail is still being written).
+    bool IsEmpty() const { return _queue.size() < 2; }
+    auto GetSize() const { return _queue.size(); }
+
+private:
+    int _front = 0;            // write offset inside the tail frame, bytes
+    AVChannelLayout _layout;
+    int _sampleRate;
+    int _nbSamples;            // samples per buffered frame
+    int _usedLinesize;         // bytes per buffered frame
+    AVSampleFormat _fmt;
+    std::queue<Frame<MediaType::AUDIO>> _queue;
+};
+
+// Wraps an SwrContext: buffers raw input bytes, resamples them, and queues
+// the converted output frames.
+class Resampler {
+public:
+    bool Open(int inChannelNums, int inSampleRate, AVSampleFormat inFmt,
+        int outChannelNums, int outSampleRate, AVSampleFormat outFmt, int outNbSample);
+    bool Convert(uint8_t* data, int size);
+    void Close();
+    FrameQueue& GetQueue() { return _toQueue; }
+    ~Resampler() { Close(); }
+
+private:
+    AVFrame* _swrFrame = nullptr;   // scratch frame for each swr_convert burst
+    SwrContext* _swrCtx = nullptr;
+    FrameQueue _fromQueue;          // raw input side
+    FrameQueue _toQueue;            // converted output side
+};
+
+// Mixes several resampled input streams into a single output AVFrame,
+// with per-input gain and drift/backlog control.
+class AudioMixer {
+public:
+    struct AudioInfo {
+        uint32_t sampleRate;
+        uint32_t channels;
+        uint32_t bitsPerSample;
+        AVSampleFormat format;
+        std::string name;
+        std::unique_ptr<Resampler> resampler;
+        float volume = 0;   // last measured level (first sample * scale)
+        float scale = 1;    // per-input gain
+        int callTime = 0;
+    };
+    AudioMixer();
+    virtual ~AudioMixer();
+    // Register an input channel (before SetOutFrameSize()).
+    bool AddAudioInput(uint32_t index, uint32_t sampleRate, uint32_t channels,
+        uint32_t bitsPerSample, AVSampleFormat format);
+    // Configure the mixed output channel (before SetOutFrameSize()).
+    bool AddAudioOutput(const uint32_t sampleRate, const uint32_t channels,
+        const uint32_t bitsPerSample, const AVSampleFormat format);
+    AVFrame* Convert(uint32_t index, uint8_t* inBuf, uint32_t size);
+    bool SetOutFrameSize(int outputFrameSize = 1024);
+    int GetOutFrameSize() const { return _outFrameSize; };
+    bool Close();
+    AudioInfo* GetInputInfo(uint32_t index);
+
+private:
+    bool _inited = false;
+    std::mutex _mutex;
+    // Inputs, keyed by caller-chosen index
+    std::unordered_map<uint32_t, AudioInfo> _audioInputInfos;
+    // Output format description
+    AudioInfo _audioOutputInfo;
+    AVFrame* _outputFrame = nullptr;  // reused mixed frame
+    bool _AdjustVolume();
+    int _outFrameSize = 0;
+};
+
+#endif // __AUDIO_MIXER_H__

+ 200 - 0
AvRecorder/encoder/video_encoder.cpp

@@ -0,0 +1,200 @@
+#include "video_encoder.h"
+
+extern "C" {
+#include <libavutil/opt.h>
+}
+
+// Cached result of _FindUsableEncoders(); filled lazily on first query.
+std::vector<std::string> Encoder<MediaType::VIDEO>::_usableEncoders;
+
+Encoder<MediaType::VIDEO>::Encoder() {}
+
+// Opens the configured H.264 encoder (software or hardware); any previously
+// opened context is released first. _isOpen is set only on success.
+bool Encoder<MediaType::VIDEO>::Open(const Param& encodeParam, AVFormatContext* fmtCtx)
+{
+    Close();
+    _isOpen = false;
+    __CheckBool(_Init(encodeParam, fmtCtx));
+
+    // Open the encoder
+    __CheckBool(avcodec_open2(_codecCtx, _codec, nullptr) >= 0);
+
+    _isOpen = true;
+    return true;
+}
+
+bool Encoder<MediaType::VIDEO>::PushFrame(AVFrame* frame, bool isEnd, uint64_t pts)
+{
+    if (!isEnd) {
+        __CheckBool(_Trans(frame));
+        frame = _bufferFrame;
+        __CheckBool(frame);
+    } else {
+        frame = nullptr; // 直接刷新编码器缓存
+    }
+    if (frame != nullptr) {
+        frame->pts = pts;
+    }
+    __CheckBool(avcodec_send_frame(_codecCtx, frame) >= 0);
+    return true;
+}
+
+void Encoder<MediaType::VIDEO>::AfterEncode()
+{
+    if (_isHardware) {
+        Free(_hwFrame, [this] { av_frame_free(&_hwFrame); });
+    }
+}
+
+void Encoder<MediaType::VIDEO>::Close()
+{
+    if (_codecCtx != nullptr) {
+        avcodec_free_context(&_codecCtx);
+    }
+
+    Free(_codecCtx, [this] { avcodec_free_context(&_codecCtx); });
+    Free(_hwDeviceCtx, [this] { av_buffer_unref(&_hwDeviceCtx); });
+    _converter = nullptr;
+}
+
+const std::vector<std::string>& Encoder<MediaType::VIDEO>::GetUsableEncoders()
+{
+    if (_usableEncoders.empty()) {
+        _FindUsableEncoders();
+    }
+    return _usableEncoders;
+}
+
+void Encoder<MediaType::VIDEO>::_FindUsableEncoders()
+{
+    // 尝试打开编码器看看编码器能不能用
+    Param param;
+    param.bitRate = 1000;
+    param.fps = 30;
+    param.width = 1920;
+    param.height = 1080;
+    Encoder encoder;
+    AVFormatContext* fmtCtx = nullptr;
+
+    __CheckNo(avformat_alloc_output_context2(&fmtCtx, nullptr, nullptr, "test.mp4") >= 0);
+    for (const auto& name : _encoderNames) {
+        if (strcmp(name, "libx264") == 0) { // 软件编码器必定支持
+            _usableEncoders.push_back(name);
+            continue;
+        }
+        param.name = name;
+        if (encoder.Open(param, fmtCtx)) {
+            _usableEncoders.push_back(name);
+        }
+        encoder.Close();
+    }
+    Free(fmtCtx, [&fmtCtx] { avformat_free_context(fmtCtx); });
+}
+
+bool Encoder<MediaType::VIDEO>::_Init(const Param& encodeParam, AVFormatContext* fmtCtx)
+{
+    _isHardware = encodeParam.name != "libx264";
+    AVHWDeviceType hwType;
+    if (encodeParam.name == "libx264") {
+        _pixFmt = AV_PIX_FMT_NV12;
+    } else if (encodeParam.name == "h264_nvenc") {
+        _pixFmt = AV_PIX_FMT_CUDA;
+        hwType = AV_HWDEVICE_TYPE_CUDA;
+    } else if (encodeParam.name == "h264_qsv") {
+        _pixFmt = AV_PIX_FMT_QSV;
+        hwType = AV_HWDEVICE_TYPE_QSV;
+    } else if (encodeParam.name == "h264_amf") {
+        _pixFmt = AV_PIX_FMT_VULKAN;
+        hwType = AV_HWDEVICE_TYPE_VULKAN;
+    }
+    _isHardware = _pixFmt != AV_PIX_FMT_NV12;
+    if (_isHardware
+        && av_hwdevice_ctx_create(&_hwDeviceCtx, hwType, nullptr, nullptr, 0) < 0) { // 硬件解码
+        __DebugPrint("av_hwdevice_ctx_create failed\n");
+        return false;
+    }
+    __CheckBool(_codec = avcodec_find_encoder_by_name(encodeParam.name.c_str()));
+    __CheckBool(_codecCtx = avcodec_alloc_context3(_codec));
+    _codecCtx->bit_rate = encodeParam.bitRate;
+    _codecCtx->width = encodeParam.width;
+    _codecCtx->height = encodeParam.height;
+    _codecCtx->time_base = {1, encodeParam.fps};
+    _codecCtx->framerate = {encodeParam.fps, 1};
+
+    // 影响缓冲区大小
+    _codecCtx->gop_size = 5;
+    _codecCtx->max_b_frames = 0;
+    _codecCtx->pix_fmt = _pixFmt;
+
+    /* Some formats want stream headers to be separate. */
+    if (fmtCtx->oformat->flags & AVFMT_GLOBALHEADER) {
+        _codecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
+    }
+
+    if (!_isHardware) { // 软件编码设置为快,避免占用过高的 CPU ,反正硬盘不值钱
+        // av_opt_set(_codecCtx->priv_data, "preset", "veryfast", 0);
+        av_opt_set(_codecCtx->priv_data, "preset", "ultrafast", 0); // 使用最快的预设
+        av_opt_set(_codecCtx->priv_data, "tune", "zerolatency", 0); // 零延迟调优
+    } else {
+        // 为硬件编码器添加低延迟设置
+        av_opt_set(_codecCtx->priv_data, "preset", "llhp", 0); // 低延迟高性能
+        av_opt_set(_codecCtx->priv_data, "zerolatency", "1", 0);
+    }
+
+    __CheckBool(!_isHardware || _SetHwFrameCtx());
+    return true;
+}
+// Allocates and initializes the hardware frame pool (_pixFmt on the device,
+// NV12 in software) and attaches it to the codec context.
+bool Encoder<MediaType::VIDEO>::_SetHwFrameCtx()
+{
+    AVBufferRef* hwFramesRef;
+    AVHWFramesContext* framesCtx = nullptr;
+
+    __CheckBool(hwFramesRef = av_hwframe_ctx_alloc(_hwDeviceCtx));
+    framesCtx = (AVHWFramesContext*) (hwFramesRef->data);
+    framesCtx->format = _pixFmt;
+    framesCtx->sw_format = AV_PIX_FMT_NV12;
+    framesCtx->width = _codecCtx->width;
+    framesCtx->height = _codecCtx->height;
+    framesCtx->initial_pool_size = 20;
+    if (av_hwframe_ctx_init(hwFramesRef) < 0) {
+        __DebugPrint("av_hwframe_ctx_init failed\n");
+        av_buffer_unref(&hwFramesRef);
+        return false;
+    }
+    // The codec context takes its own reference; ours is dropped below.
+    __CheckBool(_codecCtx->hw_frames_ctx = av_buffer_ref(hwFramesRef));
+    av_buffer_unref(&hwFramesRef);
+    return true;
+}
+
+// Converts `frame` to NV12 (building the converter lazily on first use) and,
+// for hardware encoders, uploads it to the device. Result in _bufferFrame.
+// NOTE(review): the converter is created with the first frame's dimensions
+// and never rebuilt — presumably the capture size is fixed per session;
+// confirm.
+bool Encoder<MediaType::VIDEO>::_Trans(AVFrame* frame)
+{
+    std::lock_guard<std::mutex> lk(__mtx);
+    if (!_isOpen) {
+        return false;
+    }
+    if (frame->format == AV_PIX_FMT_NV12) {
+        _bufferFrame = frame; // already the right format: no copy
+    } else {
+        if (_converter == nullptr) {
+            _converter = std::make_unique<FfmpegConverter>(AVPixelFormat(frame->format),
+                                                           AV_PIX_FMT_NV12);
+            _converter->SetSize(frame->width, frame->height);
+        }
+        _bufferFrame = _converter->Trans(frame);
+    }
+    if (_isHardware) {
+        _bufferFrame = _ToHardware();
+    }
+    __CheckBool(_bufferFrame);
+    return true;
+}
+
+// Copies _bufferFrame into a freshly allocated hardware frame.
+// The frame is released later in AfterEncode(), including on the failure
+// paths below where _hwFrame may be partially set up.
+AVFrame* Encoder<MediaType::VIDEO>::_ToHardware()
+{
+    if (_bufferFrame == nullptr) {
+        return nullptr;
+    }
+    __CheckNullptr(_hwFrame = av_frame_alloc());
+    __CheckNullptr(av_hwframe_get_buffer(_codecCtx->hw_frames_ctx, _hwFrame, 0) >= 0);
+    __CheckNullptr(_hwFrame->hw_frames_ctx);
+    __CheckNullptr(av_hwframe_transfer_data(_hwFrame, _bufferFrame, 0) >= 0);
+    return _hwFrame;
+}

+ 47 - 0
AvRecorder/encoder/video_encoder.h

@@ -0,0 +1,47 @@
+#ifndef __VIDEO_ENCODER_H__
+#define __VIDEO_ENCODER_H__
+
+#include "abstract_encoder.h"
+#include "basic/frame.h"
+
+// H.264 video encoder; uses a hardware implementation (NVENC/QSV/AMF) when
+// available, falling back to libx264.
+template <>
+class Encoder<MediaType::VIDEO> : public AbstractEncoder {
+public:
+    struct Param {
+        int bitRate;
+        int width;
+        int height;
+        int fps;
+        std::string name; // encoder name, e.g. "libx264", "h264_nvenc"
+    };
+    Encoder();
+    ~Encoder() { Close(); }
+    bool Open(const Param& encodeParam, AVFormatContext* fmtCtx);
+    virtual bool PushFrame(AVFrame* frame, bool isEnd, uint64_t pts) override;
+    virtual void AfterEncode() override;
+    virtual void Close() override;
+    // Lazily probes which encoders can actually be opened on this machine.
+    static const std::vector<std::string>& GetUsableEncoders();
+
+private:
+    bool
+    _Init(const Param& encodeParam, AVFormatContext* fmtCtx);
+    bool _SetHwFrameCtx();
+    bool _Trans(AVFrame* frame);
+    AVFrame* _ToHardware();
+    static void _FindUsableEncoders();
+    bool _isHardware = false;
+    std::unique_ptr<FfmpegConverter> _converter = nullptr;
+    AVFrame* _bufferFrame = nullptr;  // NV12 frame handed to the codec
+    static constexpr const char* _encoderNames[4] = {
+        "h264_nvenc",
+        "h264_qsv",
+        "h264_amf",
+        "libx264",
+    };
+    static std::vector<std::string> _usableEncoders;
+    AVBufferRef* _hwDeviceCtx = nullptr;
+    AVFrame* _hwFrame = nullptr;      // per-frame hardware upload target
+    AVPixelFormat _pixFmt = AV_PIX_FMT_NV12;
+};
+
+#endif

+ 12 - 0
AvRecorder/main.cpp

@@ -0,0 +1,12 @@
+
+#include "ui/av_recorder.h"
+#include <QApplication>
+
+int main(int argc, char* argv[])
+{
+    QApplication a(argc, argv);
+    AvRecorder w;
+
+    w.show();
+    return a.exec();
+}

+ 152 - 0
AvRecorder/muxer/av_muxer.cpp

@@ -0,0 +1,152 @@
+
+#include "av_muxer.h"
+
+bool AvMuxer::Open(std::string_view filePath, std::string_view format)
+{
+    Close();
+    _isOpenFile = false;
+    _filePath = filePath;
+    __CheckBool(avformat_alloc_output_context2(&_fmtCtx, nullptr, format.data(), _filePath.c_str()) >= 0);
+    __CheckBool(_fmtCtx);
+    return true;
+}
+
+// Dumps the muxer layout, opens the output file (unless the format is
+// file-less) and writes the container header. Call after all streams
+// have been added.
+bool AvMuxer::WriteHeader()
+{
+    av_dump_format(_fmtCtx, 0, _filePath.data(), 1);
+    // Open the output file
+    if (!(_fmtCtx->oformat->flags & AVFMT_NOFILE)) {
+        __CheckBool(avio_open(&_fmtCtx->pb, _filePath.c_str(), AVIO_FLAG_WRITE) >= 0);
+    }
+    // Write the container header
+    __CheckBool(avformat_write_header(_fmtCtx, nullptr) >= 0);
+    // _fmtCtx->flags |= AVFMT_FLAG_NOBUFFER;  // unbuffered
+    // _fmtCtx->flags |= AVFMT_FLAG_FLUSH_PACKETS;  // flush packets immediately
+    _isOpenFile = true;
+    return true;
+}
+
+int AvMuxer::AddVideoStream(const Encoder<MediaType::VIDEO>::Param& param)
+{
+    __Check(-1, _fmtCtx->oformat->video_codec != AV_CODEC_ID_NONE);
+    Info info;
+    info.pts = 0;
+    info.fps = param.fps;
+    auto encoder = new Encoder<MediaType::VIDEO>;
+    __Check(-1, encoder->Open(param, _fmtCtx));
+    info.type = MediaType::VIDEO;
+    info.encoder = encoder;
+    __Check(-1, _AddStream(info));
+    _infos.back().stream->time_base = {1, info.fps};
+    return info.streamIndex;
+}
+
+int AvMuxer::AddAudioStream(const Encoder<MediaType::AUDIO>::Param& param)
+{
+    __Check(-1, _fmtCtx->oformat->audio_codec != AV_CODEC_ID_NONE);
+    Info info;
+    info.pts = 0;
+    info.fps = AUDIO_SAMPLE_RATE;
+    auto encoder = new Encoder<MediaType::AUDIO>;
+    info.type = MediaType::AUDIO;
+    info.encoder = encoder;
+    __Check(-1, encoder->Open(param, _fmtCtx));
+    __Check(-1, _AddStream(info));
+    _infos.back().stream->time_base = {1, AUDIO_SAMPLE_RATE};
+    return info.streamIndex;
+}
+
+bool AvMuxer::Write(AVFrame* frame, int streamIndex, bool isEnd)
+{
+    // 此函数不能被多个流同时调用
+    std::lock_guard<std::mutex> lk(_mtx);
+    __CheckBool(_infos.size() > streamIndex);
+    auto&& info = _infos[streamIndex];
+    if (info.isEnd) {
+        return true;
+    }
+    if (isEnd) {
+        info.isEnd = isEnd;
+        frame = nullptr;
+    }
+    __CheckBool(info.encoder);
+    // 检测流之间时间是不是差的太多,如果差的太多,直接弃掉数据多的流数据
+    if (!_CheckTime(double(info.pts) / info.fps)) {
+        info.isEncodeOverload = true;
+        return false;
+    }
+    info.isEncodeOverload = false;
+    __CheckBool(info.encoder->PushFrame(frame, isEnd, info.pts));
+    info.pts += info.type == MediaType::AUDIO ? info.encoder->GetCtx()->frame_size : 1; // 更新 pts
+    AVPacket* packet = nullptr;
+    while ((packet = info.encoder->Encode())) {
+        av_packet_rescale_ts(packet, info.encoder->GetCtx()->time_base, info.stream->time_base);
+        packet->stream_index = info.stream->index;
+        __CheckBool(av_interleaved_write_frame(_fmtCtx, packet) >= 0);
+    }
+    info.encoder->AfterEncode();
+    return true;
+}
+
+bool AvMuxer::_CheckTime(double time)
+{
+    auto minTime = double(_infos.front().pts) / _infos.front().fps;
+    for (int idx = 1; idx < _infos.size(); ++idx) {
+        minTime = std::min(double(_infos[idx].pts) / _infos[idx].fps, minTime);
+    }
+    if (time - minTime > 0.1) { // 说明相差的太多了,下一帧不能再送往编码器
+        return false;
+    }
+    return true;
+}
+
+void AvMuxer::Close()
+{
+    if (_fmtCtx == nullptr) {
+        return;
+    }
+    // 清空编码器缓存
+    for (int index = 0; index < _infos.size(); ++index) {
+        __DebugPrint("stream: %d, time:%f", index, double(_infos[index].pts) / _infos[index].fps);
+    }
+    if (_isOpenFile) {
+        __CheckNo(av_write_trailer(_fmtCtx) >= 0);
+        Free(_fmtCtx->pb, [this] { avio_closep(&_fmtCtx->pb); });
+    }
+    _isOpenFile = false;
+
+    for (auto&& info : _infos) {
+        info.encoder->Close();
+        Free(info.encoder, [&info] {info.encoder->Close(); delete info.encoder; });
+    }
+    _infos.clear();
+    Free(_fmtCtx, [this] { avformat_free_context(_fmtCtx); });
+}
+
+bool AvMuxer::_AddStream(Info& info)
+{
+    __CheckBool(info.stream = avformat_new_stream(_fmtCtx, nullptr));
+    info.stream->id = _fmtCtx->nb_streams - 1;
+    __CheckBool(avcodec_parameters_from_context(info.stream->codecpar, info.encoder->GetCtx()) >= 0);
+    info.streamIndex = _fmtCtx->nb_streams - 1;
+    info.pts = 0;
+    info.isEnd = false;
+    _infos.push_back(info);
+    return true;
+}
+
+AVCodecContext* AvMuxer::GetCodecCtx(int streamIndex)
+{
+    __CheckNullptr(streamIndex >= 0 && _infos.size() > streamIndex);
+    return _infos[streamIndex].encoder->GetCtx();
+}
+
+bool AvMuxer::IsEncodeOverload() const
+{
+    for (auto&& info : _infos) {
+        if (info.isEncodeOverload) {
+            return true;
+        }
+    }
+    return false;
+}

+ 43 - 0
AvRecorder/muxer/av_muxer.h

@@ -0,0 +1,43 @@
+#ifndef __AV_MUXER_H__
+#define __AV_MUXER_H__
+
+#include "encoder/audio_encoder.h"
+#include "encoder/video_encoder.h"
+
+// Owns the output container: one encoder + stream per track, pts
+// bookkeeping, and inter-track drift control.
+class AvMuxer {
+public:
+    struct Info {
+        MediaType type;
+        AbstractEncoder* encoder = nullptr; // owned; deleted in Close()
+        AVStream* stream = nullptr;         // owned by _fmtCtx
+        int streamIndex = -1;
+        int fps = 30;       // pts ticks per second (sample rate for audio)
+        uint64_t pts = 0;
+        bool isEnd = false;
+        bool isEncodeOverload = false;      // last frame was dropped
+    };
+    ~AvMuxer()
+    {
+        Close();
+    }
+    bool Open(std::string_view filePath, std::string_view format = "mp4");
+    bool WriteHeader();
+    // Returns the index of the created stream; -1 means creation failed.
+    int AddVideoStream(const Encoder<MediaType::VIDEO>::Param& param);
+    int AddAudioStream(const Encoder<MediaType::AUDIO>::Param& param);
+    bool Write(AVFrame* frame, int streamIndex, bool isEnd = false);
+    void Close();
+    AVCodecContext* GetCodecCtx(int streamIndex);
+    bool IsEncodeOverload() const;
+
+private:
+    std::mutex _mtx;           // serializes Write() across streams
+    bool _isOpenFile = false;
+    bool _AddStream(Info& info);
+    bool _CheckTime(double time);
+    std::vector<Info> _infos;  // indexed by stream index
+    AVFormatContext* _fmtCtx = nullptr;
+    std::string _filePath;
+};
+
+#endif

+ 137 - 0
AvRecorder/recorder/audio_recorder.cpp

@@ -0,0 +1,137 @@
+#include "audio_recorder.h"
+
+#include "capturer/audio/audio_qt_capturer.h"
+
// All members rely on their in-class defaults; nothing to do here.
AudioRecorder::AudioRecorder() {}

// Tear down capturers, mixer and stream state on destruction.
AudioRecorder::~AudioRecorder()
{
    Close();
}
+
// Opens one capturer per requested device, wires each of them to the shared
// mixer, configures the mixed output format and starts capturing.
// Devices whose Init fails are silently skipped (no mixer input is created
// for them, so they are not started either).
bool AudioRecorder::Open(const std::vector<AudioCapturer::Type>& deviceTypes,
                         Encoder<MediaType::AUDIO>::Param& param,
                         const uint32_t sampleRate,
                         const uint32_t channels,
                         const uint32_t bitsPerSample,
                         const AVSampleFormat format)
{
    Close();
    Info mixInfo;
    mixInfo.mixer = &_mixer;
    mixInfo.isRecord = &_isRecord;
    mixInfo.streamIndex = &_streamIndex;

    // Clear out and re-create one capturer (and one callback Info) per device.
    // NOTE(review): _infos must be fully populated BEFORE &_infos[index] is
    // handed to Init below — any later push_back could reallocate the vector
    // and dangle the userInfo pointers. The current two-loop order is correct.
    m_audioCapturers.clear();
    for (int index = 0; index < deviceTypes.size(); ++index) {
        mixInfo.mixIndex = index;
        _infos.push_back(mixInfo);
        m_audioCapturers.push_back(new QtAudioCapturer());
    }

    // Initialize each capturer and register its native format as a mixer input.
    for (int index = 0; index < deviceTypes.size(); ++index) {
        auto capturer = m_audioCapturers[index];
        if (!capturer->Init(deviceTypes[index], _Callback, &(_infos[index]))) {
            continue; // device unavailable — skip it, keep the others
        }
        auto&& format = capturer->GetFormat();
        __CheckBool(_mixer.AddAudioInput(index,
                                         format.sampleRate,
                                         format.channels,
                                         format.bitsPerSample,
                                         _GetAVSampleFormat(format.bitsPerSample)));
    }

    // Single mixed output in the caller-requested (encoder) format.
    __CheckBool(_mixer.AddAudioOutput(sampleRate, channels, bitsPerSample, format));
    _param = param;
    // Provisional frame size; _Callback re-syncs it with the codec's
    // frame_size once the muxer is loaded.
    __CheckBool(_mixer.SetOutFrameSize(1024));

    // Start every capturer that successfully registered a mixer input.
    for (int index = 0; index < deviceTypes.size(); ++index) {
        if (_mixer.GetInputInfo(index) != nullptr) {
            __CheckBool(m_audioCapturers[index]->Start());
        }
    }

    return true;
}
+
// Stops recording, then stops and frees every capturer and resets the mixer.
// Safe to call repeatedly (Open and the destructor both call it).
void AudioRecorder::Close()
{
    StopRecord();

    // Stop and release all audio capturers.
    for (auto capturer : m_audioCapturers) {
        if (capturer) {
            capturer->Stop();
            delete capturer;
        }
    }
    m_audioCapturers.clear();

    _mixer.Close();
    _infos.clear();
}
+
+void AudioRecorder::SetVolumeScale(float scale, int mixIndex)
+{
+    auto info = _mixer.GetInputInfo(mixIndex);
+    if (info != nullptr) {
+        info->scale = scale;
+    }
+}
+
// Registers the mixed-audio stream with the muxer and remembers the muxer in
// every per-capturer Info so _Callback can write frames to it.
bool AudioRecorder::LoadMuxer(AvMuxer& muxer)
{
    for (auto&& info : _infos) {
        info.muxer = &muxer;
    }
    __CheckBool((_streamIndex = muxer.AddAudioStream(_param)) != -1);
    return true;
}
+
// Begin muxing mixed audio (capture itself runs continuously from Open on;
// this flag only gates the write path in _Callback).
bool AudioRecorder::StartRecord()
{
    _isRecord = true;
    return true;
}

// Stop muxing; capturers and mixer stay alive so recording can resume.
void AudioRecorder::StopRecord()
{
    _isRecord = false;
}
+
// Static capture callback — runs on the capturer's own thread.
// Converts the raw PCM chunk into the mixer's output format and, while
// recording, writes the mixed frame to the muxer.
void AudioRecorder::_Callback(void* data, size_t size, void* userInfo)
{
    if (!data || size == 0 || !userInfo) {
        return;
    }

    Info* info = static_cast<Info*>(userInfo);
    if (!info->mixer || !info->isRecord) {
        return;
    }

    // Debug output:
    // qDebug() << "AudioRecorder: got audio data, size:" << size << "bytes, mix index:" << info->mixIndex;

    /* auto inputInfo =*/info->mixer->GetInputInfo(info->mixIndex);
    auto frame = info->mixer->Convert(info->mixIndex, (uint8_t*) data, size);
    if (frame == nullptr) {
        return; // mixer has no complete output frame yet
    }
    if (*(info->isRecord)) {
        __CheckNo(info->streamIndex && *(info->streamIndex) != -1);
        // Keep the mixer's output frame size in sync with the encoder's
        // frame_size; the first mismatching chunk is intentionally dropped.
        int frameSize = info->muxer->GetCodecCtx(*info->streamIndex)->frame_size;
        if (info->mixer->GetOutFrameSize() != frameSize) {
            __DebugPrint("Change frame size from %d to %d",
                         info->mixer->GetOutFrameSize(),
                         frameSize);
            info->mixer->SetOutFrameSize(frameSize);
            return;
        }
        __CheckNo(info->muxer->Write(frame, *(info->streamIndex)));
    }
}

+ 50 - 0
AvRecorder/recorder/audio_recorder.h

@@ -0,0 +1,50 @@
+#ifndef __AUDIO_RECORDER_H__
+#define __AUDIO_RECORDER_H__
+
+#include "capturer/audio/audio_capturer.h"
+#include "encoder/audio_mixer.h"
+#include "muxer/av_muxer.h"
+
+class AudioRecorder
+{
+public:
+    AudioRecorder();
+    ~AudioRecorder();
+
+    struct Info
+    {
+        AudioMixer* mixer = nullptr;
+        AvMuxer* muxer = nullptr;
+        bool* isRecord = nullptr;
+        int mixIndex;
+        int* streamIndex = nullptr;
+    };
+
+    bool Open(const std::vector<AudioCapturer::Type>& deviceTypes,
+              Encoder<MediaType::AUDIO>::Param& param,
+              const uint32_t sampleRate = AUDIO_SAMPLE_RATE,
+              const uint32_t channels = AUDIO_CHANNEL,
+              const uint32_t bitsPerSample = 32,
+              const AVSampleFormat format = AUDIO_FMT);
+    bool LoadMuxer(AvMuxer& muxer);
+    bool StartRecord();
+    void StopRecord();
+    void Close();
+    auto GetCaptureInfo(int mixIndex) { return _mixer.GetInputInfo(mixIndex); }
+    void SetVolumeScale(float scale, int mixIndex);
+
+private:
+    std::vector<IAudioCapturer*> m_audioCapturers;
+    AudioMixer _mixer;
+    std::vector<Info> _infos;
+    bool _isRecord = false;
+    int _streamIndex;
+    Encoder<MediaType::AUDIO>::Param _param;
+    static void _Callback(void* data, size_t size, void* userInfo);
+    AVSampleFormat _GetAVSampleFormat(int wBitsPerSample)
+    {
+        return wBitsPerSample == 16 ? AV_SAMPLE_FMT_S16 : AV_SAMPLE_FMT_S32;
+    }
+};
+
+#endif

+ 107 - 0
AvRecorder/recorder/video_recorder.cpp

@@ -0,0 +1,107 @@
+#include "video_recorder.h"
+
// Opens a recording session that captures the given window.
// `param` is updated in-place with the capture dimensions (see _Open).
bool VideoRecorder::Open(HWND srcHwnd,
                         Encoder<MediaType::VIDEO>::Param& param,
                         VideoCapturer::Method type)
{
    Close(); // re-opening implicitly closes any previous session
    __CheckBool(_capturer.Open(srcHwnd, type));
    __CheckBool(_Open(param));
    return true;
}
+
// Opens a recording session that captures the given monitor index.
// `param` is updated in-place with the capture dimensions (see _Open).
bool VideoRecorder::Open(int monitorIdx,
                         Encoder<MediaType::VIDEO>::Param& param,
                         VideoCapturer::Method type)
{
    Close(); // re-opening implicitly closes any previous session
    __CheckBool(_capturer.Open(monitorIdx, type));
    __CheckBool(_Open(param));
    return true;
}
+
// Shared tail of both Open() overloads: allocates the encode/render frame
// buffers and starts the capture timer that copies the latest captured
// frame into _encodeFrame (under the global __mtx).
bool VideoRecorder::_Open(Encoder<MediaType::VIDEO>::Param& param)
{
    __CheckBool(_encodeFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12,
                                                              _capturer.GetWidth(),
                                                              _capturer.GetHeight()));
    {
        std::lock_guard<std::mutex> renderLk(_renderMtx);
        __CheckBool(_renderFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12,
                                                                  _capturer.GetWidth(),
                                                                  _capturer.GetHeight()));
    }

    // Start grabbing frames at the requested fps.
    _captureTimer.Start(param.fps, [this] {
        auto srcFrame = _capturer.GetFrame();
        if (srcFrame != nullptr) {
            std::lock_guard<std::mutex> muxLk(__mtx);
            // The capturer may deliver a different pixel format than NV12;
            // reallocate _encodeFrame to match before copying.
            if (srcFrame->format != _encodeFrame->format) {
                std::lock_guard<std::mutex> renderLk(_renderMtx);
                Free(_encodeFrame, [this] { av_frame_free(&_encodeFrame); });
                __CheckNo(
                    _encodeFrame = Frame<MediaType::VIDEO>::Alloc(AVPixelFormat(srcFrame->format),
                                                                  _capturer.GetWidth(),
                                                                  _capturer.GetHeight()));
            }
            av_frame_copy(_encodeFrame, srcFrame);
        }
    });
    // Report the actual capture dimensions back to the caller's param.
    param.width = _capturer.GetWidth();
    param.height = _capturer.GetHeight();
    _param = param;
    return true;
}
+
// Returns a preview copy of the most recent captured frame (owned by this
// recorder; valid until the next call). nullptr if capture is not running.
// NOTE(review): only _renderMtx is held here while _encodeFrame is written
// by the capture thread under __mtx — confirm whether this read should also
// take __mtx to avoid a torn copy.
AVFrame* VideoRecorder::GetRenderFrame()
{
    std::lock_guard<std::mutex> renderLk(_renderMtx);
    if (_encodeFrame == nullptr) {
        return nullptr;
    }
    // Keep the render buffer's pixel format in sync with the encode buffer.
    if (_renderFrame->format != _encodeFrame->format) {
        Free(_renderFrame, [this] { av_frame_free(&_renderFrame); });
        __CheckNullptr(
            _renderFrame = Frame<MediaType::VIDEO>::Alloc(AVPixelFormat(_encodeFrame->format),
                                                          _capturer.GetWidth(),
                                                          _capturer.GetHeight()));
    }
    av_frame_copy(_renderFrame, _encodeFrame);
    return _renderFrame;
}
+
// Registers this recorder's video stream with the muxer and remembers both
// the muxer and the assigned stream index for StartRecord().
bool VideoRecorder::LoadMuxer(AvMuxer& muxer)
{
    _muxer = &muxer;
    __CheckBool((_streamIndex = muxer.AddVideoStream(_param)) != -1);
    return true;
}
+
+bool VideoRecorder::StartRecord()
+{
+    _totalPts = 0;
+    _lossPts = 0;
+    _muxTimer.Start(_param.fps, [this] {
+        ++_totalPts;
+        if (!_muxer->Write(_encodeFrame, _streamIndex)) {
+            ++_lossPts;
+        }
+    });
+    _isRecord = true;
+    return true;
+}
+void VideoRecorder::StopRecord()
+{
+    _isRecord = false;
+    _muxTimer.Stop();
+}
+
// Full teardown: stop muxing, stop capturing, release the capturer and
// both frame buffers. Safe to call repeatedly.
void VideoRecorder::Close()
{
    StopRecord();
    _captureTimer.Stop();
    _capturer.Close();
    Free(_encodeFrame, [this] { av_frame_free(&_encodeFrame); });
    Free(_renderFrame, [this] { av_frame_free(&_renderFrame); });
}

+ 43 - 0
AvRecorder/recorder/video_recorder.h

@@ -0,0 +1,43 @@
#ifndef __VIDEO_RECORDER_H__
#define __VIDEO_RECORDER_H__

#include "basic/timer.h"
#include "capturer/video/video_capturer.h"
#include "muxer/av_muxer.h"
// #include <condition_variable>
// #include <queue>

// Captures a window or monitor on a timer and muxes the frames via AvMuxer.
class VideoRecorder {
public:
    bool Open(HWND srcHwnd, Encoder<MediaType::VIDEO>::Param& param, VideoCapturer::Method type);
    bool Open(int monitorIdx, Encoder<MediaType::VIDEO>::Param& param, VideoCapturer::Method type);
    bool LoadMuxer(AvMuxer& muxer);
    bool StartRecord();
    void StopRecord();
    auto GetCapturerType() { return _capturer.GetMethod(); }
    // Preview copy of the latest captured frame (owned by the recorder).
    AVFrame* GetRenderFrame();
    // Stop recording and release all resources.
    void Close();
    void SetIsDrawCursor(bool isDraw)
    {
        _capturer.SetDrawCursor(isDraw);
    }
    bool IsCaptureOverload() const { return _captureTimer.IsOverload(); }
    // Fraction of mux ticks whose frame could not be written.
    double GetLossRate() { return _lossPts == 0 ? 0 : (double)_lossPts / _totalPts; }

private:
    bool _Open(Encoder<MediaType::VIDEO>::Param& param);
    VideoCapturer _capturer;
    AvMuxer* _muxer = nullptr;
    bool _isRecord = false;
    int _streamIndex = -1;
    AVFrame* _encodeFrame = nullptr; // written by capture timer under __mtx
    AVFrame* _renderFrame = nullptr; // preview buffer, guarded by _renderMtx
    Encoder<MediaType::VIDEO>::Param _param;
    Timer _captureTimer; // copies capturer output into _encodeFrame
    Timer _muxTimer;     // pushes _encodeFrame to the muxer
    std::mutex _renderMtx;
    uint64_t _totalPts = 0;
    uint64_t _lossPts = 0;
};
#endif

+ 4 - 0
AvRecorder/test/basic/basic.cpp

@@ -0,0 +1,4 @@

#include "basic/basic.h"

// Definition of the global mutex declared in basic/basic.h; used to
// serialize access to shared frame buffers across recorder threads.
std::mutex __mtx;

+ 85 - 0
AvRecorder/test/basic/basic.h

@@ -0,0 +1,85 @@
#ifndef __BASIC_FUCN_H__ // NOTE(review): "FUCN" looks like a typo for "FUNC" — harmless but consider renaming
#define __BASIC_FUCN_H__
#define __STDC_FORMAT_MACROS

#include <functional>
#include <mutex>
#include <thread>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}

// ***************
// MUTEX
// Global mutex shared by the recorder threads (defined in basic.cpp).
extern std::mutex __mtx;

// ***************
// debug function

#define __AVDEBUG

#ifdef __AVDEBUG
// Printf-style debug trace prefixed with file and line.
#define __DebugPrint(fmtStr, ...) \
    std::printf("[" __FILE__ ", line:%d] " fmtStr "\n", __LINE__, ##__VA_ARGS__)
#define __Str(exp) #exp
// Evaluates the expression; on failure prints it and returns `retVal`
// from the enclosing function.
#define __Check(retVal, ...)                            \
    do {                                                \
        if (!(__VA_ARGS__)) {                           \
            __DebugPrint(__Str(__VA_ARGS__) " failed"); \
            return retVal;                              \
        }                                               \
    } while (false)

#else
// Release variants: same control flow, no output.
#define __DebugPrint(fmtStr, ...)
#define __Check(retVal, ...)  \
    do {                      \
        if (!(__VA_ARGS__)) { \
            return retVal;    \
        }                     \
    } while (false)
#endif

// Convenience wrappers for void / bool / pointer-returning functions.
#define __CheckNo(...) __Check(, __VA_ARGS__)
#define __CheckBool(...) __Check(false, __VA_ARGS__)
#define __CheckNullptr(...) __Check(nullptr, __VA_ARGS__)

// Media kind selector used throughout the encoder/frame templates.
enum class MediaType {
    AUDIO,
    VIDEO
};
+
+// ***************
+// memory function
+
// Runs `func` to release the resource behind `ptr`, then nulls the pointer.
// A null pointer is a no-op (func is not called), so double-Free is safe.
template <typename T, typename Func>
void Free(T*& ptr, Func&& func)
{
    static_assert(std::is_convertible_v<Func, std::function<void()>>, "Type Func should be std::function<void()>");
    if (ptr != nullptr) {
        func();
        ptr = nullptr;
    }
}
+
+//***************
+// time function
+
+// Sleep x ms
// Blocks the calling thread for `timeMs` milliseconds.
inline void SleepMs(int timeMs)
{
    const std::chrono::milliseconds interval{timeMs};
    std::this_thread::sleep_for(interval);
}
+
// Global defaults for the audio encoder.
constexpr int AUDIO_SAMPLE_RATE = 48000;
constexpr int AUDIO_CHANNEL = 1;
constexpr AVSampleFormat AUDIO_FMT = AV_SAMPLE_FMT_FLTP;
constexpr int MICROPHONE_INDEX = 0; // mixer input slot for the microphone
constexpr int SPEAKER_INDEX = 1;    // mixer input slot for speaker loopback

#endif

+ 102 - 0
AvRecorder/test/basic/frame.cpp

@@ -0,0 +1,102 @@
+
+#include "basic/frame.h"
+
+extern "C" {
+#include <libswscale/swscale.h>
+}
+
// Allocates a writable audio AVFrame with the given format/layout/rate/size.
// Caller owns the returned frame; returns nullptr on allocation failure.
// NOTE(review): the av_channel_layout_copy return value is not checked.
AVFrame* Frame<MediaType::AUDIO>::Alloc(AVSampleFormat sampleFmt,
    const AVChannelLayout* channel_layout,
    int sampleRate, int nbSamples)
{
    AVFrame* frame = nullptr;
    __CheckNullptr(frame = av_frame_alloc());
    frame->format = sampleFmt;
    av_channel_layout_copy(&frame->ch_layout, channel_layout);
    frame->sample_rate = sampleRate;
    frame->nb_samples = nbSamples;

    /* allocate the buffers for the frame data */
    __CheckNullptr(av_frame_get_buffer(frame, 0) >= 0);
    return frame;
}
+
// Constructs an owning audio frame; on failure `frame` stays nullptr
// (the __CheckNo macro returns early from the constructor body).
Frame<MediaType::AUDIO>::Frame(AVSampleFormat sampleFmt,
    const AVChannelLayout* channel_layout, int sampleRate,
    int nbSamples)
{
    __CheckNo(frame = Alloc(sampleFmt, channel_layout, sampleRate, nbSamples));
}
+
// Deep-copy constructor: allocates a frame with the source's parameters and
// copies its data planes. av_frame_copy copies data only, not pts/side data.
Frame<MediaType::AUDIO>::Frame(AVFrame* frame)
{
    if (frame == nullptr) {
        this->frame = nullptr;
        return;
    }
    __CheckNo(this->frame = Alloc(AVSampleFormat(frame->format), &frame->ch_layout, frame->sample_rate, frame->nb_samples));
    __CheckNo(av_frame_copy(this->frame, frame) >= 0);
}
+
// Constructs an owning video frame; on failure `frame` stays nullptr.
Frame<MediaType::VIDEO>::Frame(AVPixelFormat pixFmt, int width, int height)
{
    __CheckNo(frame = Alloc(pixFmt, width, height));
}
+
// Allocates a writable video AVFrame of the given format and dimensions.
// Caller owns the returned frame; returns nullptr on allocation failure.
AVFrame* Frame<MediaType::VIDEO>::Alloc(AVPixelFormat pixFmt, int width, int height)
{
    AVFrame* frame = nullptr;
    __CheckNullptr(frame = av_frame_alloc());

    frame->format = pixFmt;
    frame->width = width;
    frame->height = height;

    /* allocate the buffers for the frame data */
    __CheckNullptr(av_frame_get_buffer(frame, 0) >= 0);
    return frame;
}
+
// Deep-copy constructor: allocates a frame with the source's parameters and
// copies its data planes. av_frame_copy copies data only, not pts/side data.
Frame<MediaType::VIDEO>::Frame(AVFrame* frame)
{
    if (frame == nullptr) {
        this->frame = nullptr;
        return;
    }
    __CheckNo(this->frame = Alloc(AVPixelFormat(frame->format), frame->width, frame->height));
    __CheckNo(av_frame_copy(this->frame, frame) >= 0);
}
+
// (Re)creates the scaler context and the cached output frame for the given
// dimensions. Must be called before Trans(); safe to call again on resize.
bool FfmpegConverter::SetSize(int width, int height)
{
    Free(_swsCtx, [this] { sws_freeContext(_swsCtx); });
    Free(_frameTo, [this] { av_frame_free(&_frameTo); });
    // Create the pixel-format conversion context (same width/height on both
    // sides — this converter only changes the pixel format, not the size).
    __CheckBool(_swsCtx = sws_getContext(
                    width, height, _from,
                    width, height, _to,
                    0, NULL, NULL, NULL));

    __CheckBool(_frameTo = Frame<MediaType::VIDEO>::Alloc(_to, width, height));
    return true;
}
+
// Converts `frameFrom` into the target pixel format and returns the cached
// output frame (owned by the converter; valid until the next call).
AVFrame* FfmpegConverter::Trans(AVFrame* frameFrom)
{
    // A null input returns the cached (previous) output frame directly.
    if (frameFrom == nullptr) {
        return _frameTo;
    }
    __CheckNullptr(
        sws_scale(_swsCtx, (const uint8_t* const*)frameFrom->data,
            frameFrom->linesize, 0, frameFrom->height, _frameTo->data,
            _frameTo->linesize)
        >= 0);
    return _frameTo;
}
+
+FfmpegConverter::~FfmpegConverter()
+{
+    Free(_swsCtx, [this] { sws_freeContext(_swsCtx); });
+    Free(_frameTo, [this] { av_frame_free(&_frameTo); });
+}

+ 78 - 0
AvRecorder/test/basic/frame.h

@@ -0,0 +1,78 @@
+#ifndef __FRAME_H__
+#define __FRAME_H__
+#include "basic/basic.h"
+
+class __BasicFrame {
+public:
+    AVFrame* frame = nullptr;
+    __BasicFrame() = default;
+    __BasicFrame(__BasicFrame&& rhs) noexcept
+    {
+        frame = rhs.frame;
+        rhs.frame = nullptr;
+    }
+    __BasicFrame& operator=(__BasicFrame&& rhs)
+    {
+        Free(frame, [this] { av_frame_free(&frame); });
+        frame = rhs.frame;
+        rhs.frame = nullptr;
+        return *this;
+    }
+    __BasicFrame(const __BasicFrame& rhs) = delete;
+    __BasicFrame& operator=(const __BasicFrame& rhs) = delete;
+    ~__BasicFrame()
+    {
+        Free(frame, [this] { av_frame_free(&frame); });
+    }
+};
+
// Primary template; specialized below for AUDIO and VIDEO.
template <MediaType mediaType>
class Frame;

// Owning wrapper for an audio AVFrame.
template <>
class Frame<MediaType::AUDIO> : public __BasicFrame {
public:
    // Allocates a writable audio AVFrame (caller owns it); nullptr on failure.
    static AVFrame* Alloc(AVSampleFormat sampleFmt,
        const AVChannelLayout* channel_layout,
        int sampleRate, int nbSamples);

    Frame(AVSampleFormat sampleFmt,
        const AVChannelLayout* channel_layout, int sampleRate,
        int nbSamples);

    // Deep-copies the source frame's data planes (see frame.cpp).
    Frame(AVFrame* frame);
    Frame() = default;
};
+
// Owning wrapper for a video AVFrame.
template <>
class Frame<MediaType::VIDEO> : public __BasicFrame {
public:
    // Allocates a writable video AVFrame (caller owns it); nullptr on failure.
    static AVFrame* Alloc(AVPixelFormat pixFmt, int width, int height);
    Frame(AVPixelFormat pixFmt, int width, int height);
    // Deep-copies the source frame's data planes (see frame.cpp).
    Frame(AVFrame* frame);
    Frame() = default;
};
+
struct SwsContext; // forward declaration; <libswscale> is only needed in the .cpp

// Pixel-format converter built on swscale. Call SetSize() before Trans();
// the output frame is cached and owned by the converter.
class FfmpegConverter {
private:
    AVPixelFormat _from;
    AVPixelFormat _to;

public:
    FfmpegConverter(AVPixelFormat from, AVPixelFormat to)
        : _from(from)
        , _to(to)
    {
    }
    bool SetSize(int width, int height);
    AVFrame* Trans(AVFrame* frameFrom);
    ~FfmpegConverter();

private:
    AVFrame* _frameTo = nullptr;  // cached conversion output
    SwsContext* _swsCtx = nullptr;
};

#endif

+ 88 - 0
AvRecorder/test/basic/timer.h

@@ -0,0 +1,88 @@
+#ifndef __TIMER_H__
+#define __TIMER_H__
+
+#include "basic/basic.h"
+
+#include <functional>
+
+class Timer
+{
+public:
+    ~Timer() { Stop(); }
+
+    // interval 为 0 表示时刻执行
+    template<typename Func>
+    void Start(int fps, Func&& func)
+    {
+        static_assert(std::is_convertible_v<Func, std::function<void()>>,
+                      "func need to be std::function<void()>");
+        _fps = fps;
+        _tickCnt = 0;
+        _isOverload = false;
+        __CheckNo(!_isRunning);
+        using namespace std::chrono;
+        _isRunning = true;
+        _beginTime = high_resolution_clock::now();
+        if (_fps > 0) {
+            auto task = [this, func = std::forward<Func>(func)]() mutable {
+                while (_isRunning) {
+                    // 这里不能直接使用整数除法
+                    // 因为整数除法有截断,导致最终睡眠的时间少一些
+                    uint64_t goalTime = int((double(1000.0) / _fps * _tickCnt) + 0.5);
+                    ++_tickCnt;
+                    auto nowTime = high_resolution_clock::now();
+                    auto duration = duration_cast<milliseconds>(nowTime - _beginTime).count();
+                    int64_t sleepTime = goalTime - duration;
+                    if (sleepTime > 0) {
+                        SleepMs(sleepTime);
+                    }
+#ifdef __AVDEBUG
+                    // else if (sleepTime < 0) {
+                    //     printf("Time out : %lld\n", -sleepTime);
+                    // }
+#endif
+                    _isOverload = -sleepTime > 1000; // 捕获的过载时间设置为 1s
+                    func();
+                }
+            };
+            _thread = new std::thread(std::move(task));
+            // timeBeginPeriod(1);
+            return;
+        }
+
+        auto task = [this, func = std::forward<Func>(func)]() mutable {
+            while (_isRunning) {
+                func();
+            }
+        };
+        _thread = new std::thread(std::move(task));
+    }
+
+    void Stop()
+    {
+        _isRunning = false;
+        if (_thread == nullptr) {
+            return;
+        }
+        // if (_fps > 0) {
+        //     timeEndPeriod(1);
+        // }
+        _thread->join();
+        delete _thread;
+
+        _thread = nullptr;
+    }
+
+    bool IsOverload() const { return _isOverload; }
+
+private:
+    int _fps = 100;
+    int _isRunning = false;
+    int _isOverload = false;
+    std::vector<int> vec;
+    std::chrono::time_point<std::chrono::high_resolution_clock> _beginTime;
+    std::thread* _thread = nullptr;
+    uint64_t _tickCnt = 0;
+};
+
+#endif

+ 182 - 0
AvRecorder/test/capturer/audio_capturer.cpp

@@ -0,0 +1,182 @@
+#include "audio_capturer.h"
+
+#include "basic/basic.h"
+
+#define DEFAULT_SAMPLE_RATE 48000        // 默认采样率:48kHz
+#define DEFAULT_BITS_PER_SAMPLE 16       // 默认位深:16bit
+#define DEFAULT_CHANNELS 1               // 默认音频通道数:1
+#define DEFAULT_AUDIO_PACKET_INTERVAL 10 // 默认音频包发送间隔:10ms
+
// Sets up the WASAPI capture chain for the given endpoint type: device
// enumerator -> endpoint -> IAudioClient -> IAudioCaptureClient. The capture
// format is negotiated with the engine before the client is initialized.
// `callback` is invoked from the capture thread with raw PCM chunks.
bool AudioCapturer::Init(Type deviceType, CallBack callback, void* userInfo)
{
    Stop(); // tear down any previous session first
    _userInfo = userInfo;
    _callback = callback;
    _deviceType = deviceType;
    __CheckBool(_CreateDeviceEnumerator(&_pDeviceEnumerator));
    __CheckBool(_CreateDevice(_pDeviceEnumerator, &_pDevice));
    __CheckBool(_CreateAudioClient(_pDevice, &_pAudioClient));

    // Fall back to the engine's mix format when the default request fails.
    if (!_IsFormatSupported(_pAudioClient)) {
        __CheckBool(_GetPreferFormat(_pAudioClient, &_formatex));
    }
    __CheckBool(_InitAudioClient(_pAudioClient, &_formatex));
    __CheckBool(_CreateAudioCaptureClient(_pAudioClient, &_pAudioCaptureClient));
    _isInit = true;
    return true;
}
+
// Launches the capture thread. Requires a successful Init() first.
bool AudioCapturer::Start()
{
    __CheckBool(_isInit);
    _loopFlag = true;
    // Play a looping mute wav to force the render endpoint active, so that
    // loopback capture of the speaker keeps delivering packets.
    PlaySoundA("./rc/mute.wav", nullptr, SND_FILENAME | SND_ASYNC | SND_LOOP);
    _captureThread = new std::thread(
        [this] { _ThreadRun(_pAudioClient, _pAudioCaptureClient); });
    return true;
}
+
// Stops the capture thread and releases the whole WASAPI object chain in
// reverse creation order. Safe to call repeatedly (Init() calls it too).
void AudioCapturer::Stop()
{
    // CoUninitialize();
    _isInit = false;
    _loopFlag = false; // signals _ThreadRun to leave its loop
    Free(_captureThread, [this] {
        _captureThread->join();
        delete _captureThread;
    });
    Free(_pAudioCaptureClient, [this] { _pAudioCaptureClient->Release(); });
    if (_pAudioClient != nullptr) {
        _pAudioClient->Stop();
    }
    // Stop the looping mute sound started in Start().
    PlaySoundA(nullptr, nullptr, SND_FILENAME | SND_ASYNC | SND_LOOP);

    Free(_pAudioClient, [this] { _pAudioClient->Release(); });
    Free(_pDevice, [this] { _pDevice->Release(); });
    Free(_pDeviceEnumerator, [this] { _pDeviceEnumerator->Release(); });
}
+
// Creates the MMDevice enumerator COM object.
// NOTE(review): COM initialization (CoInitializeEx) is commented out here —
// presumably the host application initializes COM; confirm against callers.
bool AudioCapturer::_CreateDeviceEnumerator(IMMDeviceEnumerator** enumerator)
{
    // __CheckBool(SUCCEEDED(CoInitializeEx(nullptr, COINIT_MULTITHREADED)));
    // __CheckBool(SUCCEEDED(CoInitializeEx(nullptr, COINIT_APARTMENTTHREADED)));
    __CheckBool(SUCCEEDED(CoCreateInstance(__uuidof(MMDeviceEnumerator), NULL, CLSCTX_ALL,
        __uuidof(IMMDeviceEnumerator),
        reinterpret_cast<void**>(enumerator))));
    return true;
}
+bool AudioCapturer::_CreateDevice(IMMDeviceEnumerator* enumerator, IMMDevice** device)
+{
+    EDataFlow enDataFlow = _deviceType == Microphone ? eCapture : eRender;
+    ERole enRole = eConsole;
+    __CheckBool(SUCCEEDED(enumerator->GetDefaultAudioEndpoint(enDataFlow, enRole, device)));
+    return true;
+}
// Activates an IAudioClient on the resolved endpoint.
bool AudioCapturer::_CreateAudioClient(IMMDevice* device, IAudioClient** audioClient)
{
    __CheckBool(SUCCEEDED(device->Activate(__uuidof(IAudioClient), CLSCTX_ALL, NULL,
        (void**)audioClient)));
    return true;
}
+bool AudioCapturer::_IsFormatSupported(IAudioClient* audioClient)
+{
+    memset(&_formatex, 0, sizeof(_formatex));
+    WAVEFORMATEX* format = &_formatex.Format;
+    format->nSamplesPerSec = DEFAULT_SAMPLE_RATE;
+    format->wBitsPerSample = DEFAULT_BITS_PER_SAMPLE;
+    format->nChannels = DEFAULT_CHANNELS;
+
+    WAVEFORMATEX* closestMatch = nullptr;
+
+    HRESULT hr = audioClient->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED,
+        format, &closestMatch);
+    if (hr == AUDCLNT_E_UNSUPPORTED_FORMAT) // 0x88890008
+    {
+        if (closestMatch == nullptr) // 如果找不到最相近的格式,closestMatch可能为nullptr
+        {
+            return false;
+        }
+
+        format->nSamplesPerSec = closestMatch->nSamplesPerSec;
+        format->wBitsPerSample = closestMatch->wBitsPerSample;
+        format->nChannels = closestMatch->nChannels;
+
+        return true;
+    }
+
+    return false;
+}
+bool AudioCapturer::_GetPreferFormat(IAudioClient* audioClient,
+    WAVEFORMATEXTENSIBLE* formatex)
+{
+    WAVEFORMATEX* format = nullptr;
+    __CheckBool(SUCCEEDED(audioClient->GetMixFormat(&format)));
+    formatex->Format.nSamplesPerSec = format->nSamplesPerSec;
+    formatex->Format.wBitsPerSample = format->wBitsPerSample;
+    formatex->Format.nChannels = format->nChannels;
+    return true;
+}
// Initializes the audio client in shared mode with the negotiated format.
// Speakers are captured via the loopback flag on the render endpoint.
bool AudioCapturer::_InitAudioClient(IAudioClient* audioClient,
    WAVEFORMATEXTENSIBLE* formatex)
{
    AUDCLNT_SHAREMODE shareMode = AUDCLNT_SHAREMODE_SHARED; // share Audio Engine with other applications
    DWORD streamFlags = _deviceType == Microphone ? 0 : AUDCLNT_STREAMFLAGS_LOOPBACK;
    streamFlags |= AUDCLNT_STREAMFLAGS_AUTOCONVERTPCM;      // A channel matrixer and a sample
                                                            // rate converter are inserted
    streamFlags |= AUDCLNT_STREAMFLAGS_SRC_DEFAULT_QUALITY; // a sample rate converter
                                                            // with better quality than
                                                            // the default conversion but
                                                            // with a higher performance
                                                            // cost is used
    REFERENCE_TIME hnsBufferDuration = 0;
    // Fill out the WAVEFORMATEXTENSIBLE derived fields from the negotiated
    // rate/bits/channels before handing it to Initialize().
    WAVEFORMATEX* format = &formatex->Format;
    format->wFormatTag = WAVE_FORMAT_EXTENSIBLE;
    format->nBlockAlign = (format->wBitsPerSample >> 3) * format->nChannels;
    format->nAvgBytesPerSec = format->nBlockAlign * format->nSamplesPerSec;
    format->cbSize = sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX);
    formatex->Samples.wValidBitsPerSample = format->wBitsPerSample;
    formatex->dwChannelMask = format->nChannels == 1 ? KSAUDIO_SPEAKER_MONO : KSAUDIO_SPEAKER_STEREO;
    formatex->SubFormat = KSDATAFORMAT_SUBTYPE_PCM;

    __CheckBool(SUCCEEDED(audioClient->Initialize(shareMode, streamFlags, hnsBufferDuration, 0,
        format, nullptr)));
    return true;
}
+
// Obtains the IAudioCaptureClient service used by the capture loop.
bool AudioCapturer::_CreateAudioCaptureClient(IAudioClient* audioClient,
    IAudioCaptureClient** audioCaptureClient)
{
    __CheckBool(SUCCEEDED(audioClient->GetService(IID_PPV_ARGS(audioCaptureClient))));
    return true;
}
+
// Capture-thread body: polls every 5 ms, drains all pending packets and
// hands each buffer to the user callback. Returns when _loopFlag is cleared.
bool AudioCapturer::_ThreadRun(IAudioClient* audio_client,
    IAudioCaptureClient* audio_capture_client)
{
    UINT32 num_success = 0;
    BYTE* p_audio_data = nullptr;
    UINT32 num_frames_to_read = 0;
    DWORD dw_flag = 0;
    UINT32 num_frames_in_next_packet = 0;
    audio_client->Start();
    while (_loopFlag) {
        SleepMs(5);
        // Drain every packet currently queued in the engine.
        while (true) {
            __CheckBool(SUCCEEDED(audio_capture_client->GetNextPacketSize(&num_frames_in_next_packet)));
            if (num_frames_in_next_packet == 0) {
                break;
            }

            __CheckBool(SUCCEEDED(audio_capture_client->GetBuffer(&p_audio_data, &num_frames_to_read,
                &dw_flag, nullptr, nullptr)));

            // bytes = bytesPerSample * channels * frames
            size_t size = (_formatex.Format.wBitsPerSample >> 3) * _formatex.Format.nChannels * num_frames_to_read;
            _callback(p_audio_data, size, _userInfo);
            __CheckBool(SUCCEEDED(audio_capture_client->ReleaseBuffer(num_frames_to_read)));
        }
    }

    audio_client->Stop();
    return true;
}

+ 53 - 0
AvRecorder/test/capturer/audio_capturer.h

@@ -0,0 +1,53 @@

#ifndef __AUDIO_CAPTURER_H__
#define __AUDIO_CAPTURER_H__

#include <audioclient.h>
#include <combaseapi.h>
#include <mmdeviceapi.h>

#include <memory>
#include <thread>

// WASAPI-based capturer for the default microphone or (via loopback) the
// default speaker. Delivers raw PCM chunks to a user callback on a
// dedicated capture thread.
class AudioCapturer {
public:
    enum Type {
        Microphone, // default capture endpoint
        Speaker     // default render endpoint, captured in loopback mode
    };
    // Invoked on the capture thread with `size` bytes of PCM at `data`.
    using CallBack = void (*)(void* data, size_t size, void* userInfo);

    bool Init(Type deviceType, CallBack callback, void* userInfo = nullptr);
    bool Start();
    // Negotiated capture format; valid after a successful Init().
    const WAVEFORMATEX& GetFormat() const { return _formatex.Format; }

    void Stop();

private:
    bool _isInit = false;
    CallBack _callback;
    Type _deviceType;
    IMMDeviceEnumerator* _pDeviceEnumerator = nullptr;
    IMMDevice* _pDevice = nullptr;
    IAudioClient* _pAudioClient = nullptr;
    IAudioCaptureClient* _pAudioCaptureClient = nullptr;
    std::thread* _captureThread = nullptr;
    bool _loopFlag = false; // cleared by Stop() to end the capture loop
    WAVEFORMATEXTENSIBLE _formatex;
    void* _userInfo = nullptr;

    bool _CreateDeviceEnumerator(IMMDeviceEnumerator** enumerator);
    bool _CreateDevice(IMMDeviceEnumerator* enumerator, IMMDevice** device);
    bool _CreateAudioClient(IMMDevice* device, IAudioClient** audioClient);
    bool _IsFormatSupported(IAudioClient* audioClient);
    bool _GetPreferFormat(IAudioClient* audioClient,
        WAVEFORMATEXTENSIBLE* formatex);
    bool _InitAudioClient(IAudioClient* audioClient,
        WAVEFORMATEXTENSIBLE* formatex);
    bool _CreateAudioCaptureClient(IAudioClient* audioClient,
        IAudioCaptureClient** audioCaptureClient);
    bool _ThreadRun(IAudioClient* audio_client,
        IAudioCaptureClient* audio_capture_client);
};

#endif

+ 226 - 0
AvRecorder/test/capturer/dxgi_capturer.cpp

@@ -0,0 +1,226 @@
+#include "dxgi_capturer.h"
+#include <windows.h>
+
+// _desc has no in-class initializer, so zero it here before first use.
+DxgiCapturer::DxgiCapturer()
+{
+    ZeroMemory(&_desc, sizeof(_desc));
+}
+
+// RAII: releases all D3D/DXGI resources still held.
+DxgiCapturer::~DxgiCapturer()
+{
+    Close();
+}
+
+// Starts DXGI desktop duplication for the monitor whose desktop coordinates
+// begin at (left, top), and allocates the NV12/BGR0 output frames sized
+// width x height. Returns false on any failure.
+bool DxgiCapturer::Open(int left, int top, int width, int height)
+{
+    Close();
+    HRESULT hr = S_OK;
+    _isAttached = false;
+
+    // NOTE(review): Close() just set _bInit = false, so this check is dead
+    // code; presumably a leftover re-entrancy guard.
+    if (_bInit) {
+        return false;
+    }
+
+    // Driver types supported
+    D3D_DRIVER_TYPE DriverTypes[] = {
+        D3D_DRIVER_TYPE_HARDWARE,
+        D3D_DRIVER_TYPE_WARP,
+        D3D_DRIVER_TYPE_REFERENCE,
+    };
+    UINT NumDriverTypes = ARRAYSIZE(DriverTypes);
+
+    // Feature levels supported
+    D3D_FEATURE_LEVEL FeatureLevels[] = {
+        D3D_FEATURE_LEVEL_11_0,
+        D3D_FEATURE_LEVEL_10_1,
+        D3D_FEATURE_LEVEL_10_0,
+        D3D_FEATURE_LEVEL_9_1};
+    UINT NumFeatureLevels = ARRAYSIZE(FeatureLevels);
+
+    D3D_FEATURE_LEVEL FeatureLevel;
+
+    // Create D3D device: try hardware first, then fall back to WARP/reference.
+    for (UINT DriverTypeIndex = 0; DriverTypeIndex < NumDriverTypes; ++DriverTypeIndex) {
+        hr = D3D11CreateDevice(nullptr, DriverTypes[DriverTypeIndex], nullptr, 0, FeatureLevels,
+            NumFeatureLevels, D3D11_SDK_VERSION, &_hDevice, &FeatureLevel, &_hContext);
+        if (SUCCEEDED(hr)) {
+            break;
+        }
+    }
+    __CheckBool(SUCCEEDED(hr));
+
+    // Get DXGI device
+    IDXGIDevice* hDxgiDevice = nullptr;
+    __CheckBool(SUCCEEDED(_hDevice->QueryInterface(__uuidof(IDXGIDevice), reinterpret_cast<void**>(&hDxgiDevice))));
+
+    // Get DXGI adapter
+    IDXGIAdapter* hDxgiAdapter = nullptr;
+    hr = hDxgiDevice->GetParent(__uuidof(IDXGIAdapter), reinterpret_cast<void**>(&hDxgiAdapter));
+    Free(hDxgiDevice, [=] { hDxgiDevice->Release(); });
+    __CheckBool(SUCCEEDED(hr));
+
+    // Walk the adapter's outputs looking for the monitor whose desktop rect
+    // starts at (left, top). If none matches, hr ends up failed and we bail.
+    // NOTE(review): outputs that do not match are never Released — a small
+    // COM reference leak per non-matching monitor.
+    INT nOutput = 0;
+    IDXGIOutput* hDxgiOutput = nullptr;
+    DXGI_OUTPUT_DESC dxgiOutDesc;
+    ZeroMemory(&dxgiOutDesc, sizeof(dxgiOutDesc));
+
+    for (int idx = 0; SUCCEEDED(hr = hDxgiAdapter->EnumOutputs(idx, &hDxgiOutput)); ++idx) {
+        // get output description struct
+        hDxgiOutput->GetDesc(&dxgiOutDesc);
+        if (dxgiOutDesc.DesktopCoordinates.left == left
+            && dxgiOutDesc.DesktopCoordinates.top == top) { // found the monitor
+            break;
+        }
+    }
+    Free(hDxgiAdapter, [=] { hDxgiAdapter->Release(); });
+    __CheckBool(SUCCEEDED(hr));
+
+    // QI for Output 1 (IDXGIOutput1 is required for DuplicateOutput)
+    IDXGIOutput1* hDxgiOutput1 = nullptr;
+    hr = hDxgiOutput->QueryInterface(__uuidof(hDxgiOutput1), reinterpret_cast<void**>(&hDxgiOutput1));
+    Free(hDxgiOutput, [=] { hDxgiOutput->Release(); });
+    __CheckBool(SUCCEEDED(hr));
+
+    // Create desktop duplication
+    hr = hDxgiOutput1->DuplicateOutput(_hDevice, &_hDeskDupl);
+    Free(hDxgiOutput1, [=] { hDxgiOutput1->Release(); });
+    __CheckBool(SUCCEEDED(hr));
+
+    // Color-space setup for the RGB -> NV12 video-processor conversion:
+    // full-range RGB in, limited-range (16-235) BT.601/709-style YCbCr out.
+    D3D11_VIDEO_PROCESSOR_COLOR_SPACE inputColorSpace;
+    inputColorSpace.Usage = 1;
+    inputColorSpace.RGB_Range = 0;
+    inputColorSpace.YCbCr_Matrix = 1;
+    inputColorSpace.YCbCr_xvYCC = 0;
+    inputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_0_255;
+
+    D3D11_VIDEO_PROCESSOR_COLOR_SPACE outputColorSpace;
+    outputColorSpace.Usage = 0;
+    outputColorSpace.RGB_Range = 0;
+    outputColorSpace.YCbCr_Matrix = 1;
+    outputColorSpace.YCbCr_xvYCC = 0;
+    outputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_16_235;
+    _rgbToNv12.Open(_hDevice, _hContext, inputColorSpace, outputColorSpace);
+    // Output frames: NV12 preferred, BGR0 as the fallback path.
+    _nv12Frame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12, width, height);
+    _xrgbFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_BGR0, width, height);
+    __CheckBool(_nv12Frame);
+    __CheckBool(_xrgbFrame);
+    // Initialization succeeded
+    _bInit = true;
+    return true;
+}
+// Releases frames, the converter and all D3D/DXGI interfaces; safe to call
+// repeatedly — subsequent calls are no-ops until the next successful Open().
+void DxgiCapturer::Close()
+{
+    if (!_bInit) {
+        return;
+    }
+
+    _bInit = false;
+    _nv12Buffers.Clear();
+    _xrgbBuffers.Clear();
+    _rgbToNv12.Close();
+    // Free() presumably runs the deleter and nulls the pointer — it must,
+    // or the av_frame_free/Release calls below would double-free on reuse.
+    Free(_nv12Frame, [this] { av_frame_free(&_nv12Frame); });
+    Free(_xrgbFrame, [this] { av_frame_free(&_xrgbFrame); });
+    Free(_hDeskDupl, [this] { _hDeskDupl->Release(); });
+    Free(_hDevice, [this] { _hDevice->Release(); });
+    Free(_hContext, [this] { _hContext->Release(); });
+}
+
+// Acquires the next duplicated desktop frame, copies it into a GDI-compatible
+// texture and returns an HDC onto it (so a cursor can be drawn on top).
+// Returns nullptr when no new frame is available or on any failure; sets
+// _isCaptureSuccess for the matching GetFrame() call.
+HDC DxgiCapturer::GetHdc()
+{
+    _isCaptureSuccess = false;
+    if (!_bInit) {
+        return nullptr;
+    }
+
+    IDXGIResource* hDesktopResource = nullptr;
+    DXGI_OUTDUPL_FRAME_INFO FrameInfo;
+    // Timeout 0: poll — do not block waiting for a desktop update.
+    HRESULT hr = _hDeskDupl->AcquireNextFrame(0, &FrameInfo, &hDesktopResource);
+    if (FAILED(hr)) {
+        if (hr == DXGI_ERROR_WAIT_TIMEOUT) { // expected when the desktop has not changed; do not log
+            return nullptr;
+        }
+        return nullptr;
+    }
+
+    // query next frame staging buffer
+    ID3D11Texture2D* srcImage = nullptr;
+    hr = hDesktopResource->QueryInterface(__uuidof(ID3D11Texture2D), reinterpret_cast<void**>(&srcImage));
+    Free(hDesktopResource, [=] { hDesktopResource->Release(); });
+    // NOTE(review): on this failure path the acquired frame is never
+    // ReleaseFrame()'d, which stalls the duplication — TODO confirm/fix.
+    __CheckNullptr(SUCCEEDED(hr));
+
+    srcImage->GetDesc(&_desc);
+
+    // create a new staging buffer for fill frame image
+    auto desc = _desc;
+    desc.ArraySize = 1;
+    desc.BindFlags = D3D11_BIND_FLAG::D3D11_BIND_RENDER_TARGET;
+    desc.MiscFlags = D3D11_RESOURCE_MISC_GDI_COMPATIBLE;
+    desc.SampleDesc.Count = 1;
+    desc.SampleDesc.Quality = 0;
+    desc.MipLevels = 1;
+    desc.CPUAccessFlags = 0;
+    desc.Usage = D3D11_USAGE_DEFAULT;
+    hr = _hDevice->CreateTexture2D(&desc, nullptr, &_gdiImage);
+    if (FAILED(hr)) {
+        __DebugPrint("Create _gdiImage failed");
+        Free(srcImage, [=] { srcImage->Release(); });
+        // NOTE(review): this calls ReleaseFrame() through Free(); if Free()
+        // also nulls _hDeskDupl, the duplication interface leaks and every
+        // later capture fails — verify Free()'s semantics here.
+        Free(_hDeskDupl, [this] { _hDeskDupl->ReleaseFrame(); });
+        return nullptr;
+    }
+
+    // copy next staging buffer to new staging buffer
+    _hContext->CopyResource(_gdiImage, srcImage);
+    Free(srcImage, [=] { srcImage->Release(); });
+    _hDeskDupl->ReleaseFrame();
+
+    // create staging buffer for map bits
+    _hStagingSurf = nullptr;
+    hr = _gdiImage->QueryInterface(__uuidof(IDXGISurface), (void**)(&_hStagingSurf));
+    if (FAILED(hr)) {
+        __DebugPrint("_gdiImage->QueryInterface failed");
+        Free(_gdiImage, [this] { _gdiImage->Release(); });
+        return nullptr;
+    }
+
+    _isCaptureSuccess = true;
+    HDC hdc = nullptr;
+    // if GetDC failed, hdc stays nullptr — callers must null-check.
+    _hStagingSurf->GetDC(FALSE, &hdc);
+    return hdc;
+}
+
+// Converts the texture captured by the preceding GetHdc() call into an
+// AVFrame: NV12 via the GPU video processor when possible, BGR0 otherwise.
+// Returns nullptr when GetHdc() did not capture a frame. The returned frame
+// is owned by this object and valid until the next call / Close().
+AVFrame* DxgiCapturer::GetFrame()
+{
+    if (!_isCaptureSuccess) {
+        return nullptr;
+    }
+    _isCaptureSuccess = false;
+    _hStagingSurf->ReleaseDC(nullptr);
+
+    // Copy into a temporary shareable texture for the converter.
+    ID3D11Texture2D* tmpImage = nullptr;
+    // NOTE(review): 2050 == 0x802, presumably D3D11_RESOURCE_MISC_SHARED |
+    // D3D11_RESOURCE_MISC_SHARED_NTHANDLE — replace the magic number with
+    // the named flags after confirming.
+    _desc.MiscFlags = 2050;
+    __CheckNullptr(SUCCEEDED(_hDevice->CreateTexture2D(&_desc, nullptr, &tmpImage)));
+    _hContext->CopyResource(tmpImage, _gdiImage);
+
+    // First try to produce an NV12 frame (GPU color conversion).
+    AVFrame* frame = nullptr;
+    auto tmpFormat = _desc.Format;
+    _desc.Format = DXGI_FORMAT_NV12;
+    if (GenNv12Frame(_hDevice, _hContext, _desc, tmpImage,
+            _nv12Buffers, _nv12Frame, _rgbToNv12)) {
+        frame = _nv12Frame;
+    } else {
+        // Fall back to copying out raw BGR0 pixels.
+        _desc.Format = tmpFormat;
+        GenRgbFrame(_hDevice, _hContext, _desc, _gdiImage,
+            _xrgbBuffers, _xrgbFrame);
+        frame = _xrgbFrame;
+    }
+    Free(_hStagingSurf, [this] { _hStagingSurf->Release(); });
+    Free(tmpImage, [&tmpImage] { tmpImage->Release(); });
+    Free(_gdiImage, [this] { _gdiImage->Release(); });
+
+    return frame;
+}

+ 39 - 0
AvRecorder/test/capturer/dxgi_capturer.h

@@ -0,0 +1,39 @@
+#ifndef __DXGI_CAPTURER_H__
+#define __DXGI_CAPTURER_H__
+
+#include <d3d11.h>
+#include <dxgi1_2.h>
+
+#include "d3d/gen_frame.h"
+// Captures one monitor via DXGI desktop duplication. Usage per frame:
+// GetHdc() (optionally draw the cursor on the returned DC), then GetFrame().
+class DxgiCapturer {
+public:
+    DxgiCapturer();
+    ~DxgiCapturer();
+
+public:
+    // Starts duplication of the monitor whose desktop rect begins at (left, top).
+    bool Open(int left, int top, int width, int height);
+    void Close();
+
+public:
+    // Acquires the next frame and returns a GDI DC over it (nullptr if none).
+    HDC GetHdc();
+    // Converts the acquired frame to NV12 (preferred) or BGR0.
+    AVFrame* GetFrame();
+
+private:
+    bool _bInit = false;
+    bool _isCaptureSuccess = false; // set by GetHdc(), consumed by GetFrame()
+
+    ID3D11Device* _hDevice = nullptr;
+    ID3D11DeviceContext* _hContext = nullptr;
+    IDXGIOutputDuplication* _hDeskDupl = nullptr;
+    IDXGISurface1* _hStagingSurf = nullptr; // surface backing the GDI DC
+    ID3D11Texture2D* _gdiImage = nullptr;   // GDI-compatible copy of the frame
+    D3D11_TEXTURE2D_DESC _desc;             // zeroed in the constructor
+    bool _isAttached = false;
+    AVFrame* _xrgbFrame = nullptr; // BGR0 fallback output
+    AVFrame* _nv12Frame = nullptr; // NV12 preferred output
+    BufferFiller _xrgbBuffers;
+    BufferFiller _nv12Buffers;
+    D3dConverter _rgbToNv12; // GPU RGB -> NV12 color conversion
+};
+
+#endif

+ 98 - 0
AvRecorder/test/capturer/finder.cpp

@@ -0,0 +1,98 @@
+#include "finder.h"
+
+#include <Windows.h>
+#include <array>
+
+// Returns the cached window list; re-enumerates top-level windows first
+// when isUpdate is true.
+const std::vector<WindowFinder::Info>& WindowFinder::GetList(bool isUpdate)
+{
+    if (isUpdate) {
+        _list.clear();
+        EnumWindows(_EnumWindowsProc, (LPARAM) nullptr);
+    }
+    return _list;
+}
+
+// Cached result of the last EnumWindows pass.
+std::vector<WindowFinder::Info> WindowFinder::_list;
+
+// Reads a window's title into a std::wstring (truncated at 1023 characters).
+std::wstring WindowFinder::_GetWindowTextStd(HWND hwnd)
+{
+    std::array<WCHAR, 1024> buffer {}; // GetWindowTextW NUL-terminates
+    ::GetWindowTextW(hwnd, buffer.data(), static_cast<int>(buffer.size()));
+    return std::wstring {buffer.data()};
+}
+// EnumWindows callback: records every window that would appear in Alt-Tab.
+BOOL CALLBACK WindowFinder::_EnumWindowsProc(HWND hwnd, LPARAM lParam)
+{
+    auto title = _GetWindowTextStd(hwnd);
+    if (IsAltTabWindow(hwnd, title)) {
+        _list.push_back({hwnd, std::move(title)});
+    }
+    return TRUE; // always continue enumerating
+}
+
+// True when the window is a candidate for capture: a visible, enabled,
+// titled, non-cloaked, non-minimized top-level window (Alt-Tab semantics).
+bool WindowFinder::IsAltTabWindow(HWND hwnd, const std::wstring& title)
+{
+    // The shell (desktop) window is never listed.
+    if (hwnd == GetShellWindow()) {
+        return false;
+    }
+
+    // Untitled windows and the NVIDIA overlay are filtered out.
+    if (title.empty() || title == L"NVIDIA GeForce Overlay") {
+        return false;
+    }
+
+    if (!IsWindowVisible(hwnd)) {
+        return false;
+    }
+
+    // Only top-level windows qualify.
+    if (GetAncestor(hwnd, GA_ROOT) != hwnd) {
+        return false;
+    }
+
+    // Disabled windows cannot receive input.
+    if ((GetWindowLong(hwnd, GWL_STYLE) & WS_DISABLED) == WS_DISABLED) {
+        return false;
+    }
+
+    // Skip windows cloaked by the shell (e.g. suspended UWP apps).
+    DWORD cloaked = FALSE;
+    HRESULT hrTemp = DwmGetWindowAttribute(hwnd, DWMWA_CLOAKED, &cloaked, sizeof(cloaked));
+    if (SUCCEEDED(hrTemp) && cloaked == DWM_CLOAKED_SHELL) {
+        return false;
+    }
+
+    // Finally, exclude minimized windows.
+    return !IsIconic(hwnd);
+}
+
+// Returns the cached monitor list; re-enumerates displays first when
+// isUpdate is true.
+const std::vector<MonitorFinder::Info>& MonitorFinder::GetList(bool isUpdate)
+{
+    if (isUpdate) {
+        _list.clear();
+        EnumDisplayMonitors(nullptr, nullptr, _MonitorEnumProc, (LPARAM) nullptr);
+    }
+    return _list;
+}
+
+// Cached result of the last EnumDisplayMonitors pass.
+std::vector<MonitorFinder::Info> MonitorFinder::_list;
+
+// EnumDisplayMonitors callback: records each monitor's handle, rect and a
+// generated display name.
+BOOL CALLBACK MonitorFinder::_MonitorEnumProc(
+    HMONITOR hMonitor,  // handle to display monitor
+    HDC hdcMonitor,     // handle to monitor-appropriate device context (unused)
+    LPRECT lprcMonitor, // pointer to monitor intersection rectangle (unused)
+    LPARAM dwData       // data passed from EnumDisplayMonitors (unused)
+)
+{
+    MONITORINFO monitorInfo;
+    monitorInfo.cbSize = sizeof(monitorInfo);
+    // BUGFIX: the return value was ignored, so a failed query pushed an
+    // uninitialized rect; skip monitors we cannot query instead.
+    if (!GetMonitorInfoW(hMonitor, &monitorInfo)) {
+        return TRUE; // keep enumerating the remaining monitors
+    }
+    Info info;
+    info.monitor = hMonitor;
+    info.rect = monitorInfo.rcMonitor;
+    // 1-based display name, e.g. "显示器1".
+    info.title = L"显示器" + std::to_wstring(_list.size() + 1);
+    _list.push_back(std::move(info));
+    return TRUE;
+}

+ 43 - 0
AvRecorder/test/capturer/finder.h

@@ -0,0 +1,43 @@
+#pragma once
+
+#include <dwmapi.h>
+#include <string>
+#include <vector>
+#include <d3d11.h>
+
+class WindowFinder {
+public:
+    struct Info {
+        HWND hwnd = nullptr;
+        std::wstring title;
+    };
+
+    static const std::vector<Info>& GetList(bool isUpdate = false);
+
+private:
+    static std::vector<Info> _list;
+    static std::wstring _GetWindowTextStd(HWND hwnd);
+    static BOOL CALLBACK _EnumWindowsProc(HWND hwnd, LPARAM lParam);
+    static bool IsAltTabWindow(HWND hwnd, const std::wstring& title);
+};
+
+class MonitorFinder {
+public:
+    struct Info {
+        HMONITOR monitor = nullptr;
+        std::wstring title;
+        RECT rect;
+    };
+
+    static const std::vector<Info>& GetList(bool isUpdate = false);
+
+private:
+    static std::vector<Info> _list;
+
+    static BOOL CALLBACK _MonitorEnumProc(
+        HMONITOR hMonitor,  // handle to display monitor
+        HDC hdcMonitor,     // handle to monitor-appropriate device context
+        LPRECT lprcMonitor, // pointer to monitor intersection rectangle
+        LPARAM dwData       // data passed from EnumDisplayMonitors
+    );
+};

+ 56 - 0
AvRecorder/test/capturer/gdi_capturer.cpp

@@ -0,0 +1,56 @@
+
+#include "gdi_capturer.h"
+#include "basic/basic.h"
+
+// Prepares GDI capture of `hwnd`: a window DC as the source, a compatible
+// memory DC/bitmap as the destination, and a BGR24 AVFrame as the output.
+bool GdiCapturer::Open(HWND hwnd, int width, int height)
+{
+    Close();
+    m_width = width;
+    m_height = height;
+    // BUGFIX: the GDI handles were previously used unchecked; fail fast if
+    // any of them cannot be created.
+    _srcHdc = GetWindowDC(hwnd);
+    __CheckBool(_srcHdc);
+    _dstHdc = CreateCompatibleDC(_srcHdc);
+    __CheckBool(_dstHdc);
+    _bitmap = CreateCompatibleBitmap(_srcHdc, width, height);
+    __CheckBool(_bitmap);
+    SelectObject(_dstHdc, _bitmap);
+
+    // DIB layout handed to GetDIBits: 24-bit BGR, bottom-up.
+    _bitmapInfo.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
+    _bitmapInfo.bmiHeader.biPlanes = 1;
+    _bitmapInfo.bmiHeader.biBitCount = 24;
+    _bitmapInfo.bmiHeader.biWidth = width;
+    _bitmapInfo.bmiHeader.biHeight = height;
+    _bitmapInfo.bmiHeader.biCompression = BI_RGB;
+    // BUGFIX: was width * height, which is wrong for 24 bpp; for BI_RGB
+    // bitmaps biSizeImage may simply be 0.
+    _bitmapInfo.bmiHeader.biSizeImage = 0;
+
+    // Allocate the cached output frame.
+    _frame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_BGR24, width, height);
+    __CheckBool(_frame); // was unchecked: a failed alloc crashed in GetFrame()
+    return true;
+}
+
+// Blits the window's client area into the memory DC and returns that DC so
+// the caller can draw (e.g. the cursor) on top before GetFrame().
+// The source offset skips the window frame: borderWidth/2 on the left and
+// (borderHeight - borderWidth/2) on top — presumably title bar plus frame;
+// TODO(review): verify for DPI-scaled and borderless windows.
+HDC GdiCapturer::GetHdc(int borderWidth, int borderHeight)
+{
+    __CheckNullptr(
+        BitBlt(_dstHdc, 0, 0, m_width, m_height,
+            _srcHdc, borderWidth / 2, borderHeight - borderWidth / 2, SRCCOPY));
+
+    return _dstHdc;
+}
+
+// Copies the captured bitmap into the cached AVFrame, one scanline at a time.
+// The DIB is bottom-up (biHeight > 0): scanline (m_height - 1 - row) of the
+// bitmap is the top-down row `row` of the frame; linesize carries the stride.
+AVFrame* GdiCapturer::GetFrame()
+{
+    auto linesize = _frame->linesize[0];
+    for (int row = 0; row < m_height; ++row) {
+        __CheckNullptr(GetDIBits(_dstHdc, _bitmap, m_height - 1 - row, 1, _frame->data[0] + row * linesize, &_bitmapInfo, DIB_RGB_COLORS));
+    }
+    return _frame;
+}
+
+// Releases the cached frame and GDI objects; safe to call repeatedly.
+void GdiCapturer::Close()
+{
+    Free(_frame, [this] { av_frame_free(&_frame); });
+    // BUGFIX: a memory DC must be destroyed with DeleteDC; DeleteObject is
+    // only valid for pens/brushes/bitmaps/etc. and fails on an HDC.
+    Free(_dstHdc, [this] { DeleteDC(_dstHdc); });
+    Free(_bitmap, [this] { DeleteObject(_bitmap); });
+    // NOTE(review): _srcHdc comes from GetWindowDC and should be returned
+    // with ReleaseDC(hwnd, _srcHdc); the HWND is not stored, so one window
+    // DC currently leaks per Open(). TODO: keep the HWND and release it here.
+    _srcHdc = nullptr;
+}
+
+// RAII: ensures GDI objects and the frame are released.
+GdiCapturer::~GdiCapturer()
+{
+    Close();
+}

+ 26 - 0
AvRecorder/test/capturer/gdi_capturer.h

@@ -0,0 +1,26 @@
+#ifndef __GDI_CAPTURER_H__
+#define __GDI_CAPTURER_H__
+
+#include "basic/frame.h"
+
+#include <Windows.h>
+
+// Captures a single window with classic GDI (BitBlt + GetDIBits).
+// Usage per frame: GetHdc() (optionally draw on it), then GetFrame().
+class GdiCapturer {
+public:
+    bool Open(HWND hwnd, int width, int height);
+    // Blits the client area into the memory DC and returns it.
+    HDC GetHdc(int borderWidth, int borderHeight);
+    // Copies the blitted bitmap into a cached BGR24 AVFrame.
+    AVFrame* GetFrame();
+    void Close();
+    ~GdiCapturer();
+
+private:
+    HDC _srcHdc = nullptr;     // window DC from GetWindowDC
+    HDC _dstHdc = nullptr;     // compatible memory DC
+    HBITMAP _bitmap = nullptr; // backing bitmap selected into _dstHdc
+    // BUGFIX: zero-initialize — Open() fills only a few header fields, so the
+    // remainder previously reached GetDIBits with indeterminate values.
+    BITMAPINFO _bitmapInfo = {};
+    int m_width = 0;
+    int m_height = 0;
+    AVFrame* _frame = nullptr; // cached output frame (BGR24)
+};
+
+#endif

+ 145 - 0
AvRecorder/test/capturer/video_capturer.cpp

@@ -0,0 +1,145 @@
+#include "video_capturer.h"
+#include "capturer/finder.h"
+
+// Opens window capture for `hwnd` using the requested backend.
+// WGC is used when asked for; anything else falls back to GDI (DXGI cannot
+// capture a single window). Returns false on failure.
+bool VideoCapturer::Open(HWND hwnd, Method method)
+{
+    Close();
+    __CheckBool(hwnd);
+    m_srcHwnd = hwnd;
+    // Fills m_width/m_height/m_rect and the border sizes from the window.
+    __CheckBool(_GetHwndSize(m_srcHwnd));
+    m_usingMethod = method;
+    m_type = WINDOW;
+    switch (method) {
+    case WGC: {
+        m_wgcCapturer = WgcCapturer::New();
+        __CheckBool(m_wgcCapturer->StartCapturerWindow(hwnd, m_width, m_height));
+        break;
+    }
+
+    default: { // GDI
+        m_gdiCapturer = new GdiCapturer;
+        __CheckBool(m_gdiCapturer->Open(hwnd, m_width, m_height));
+        break;
+    }
+    }
+
+    return true;
+}
+
+// Opens monitor capture for the monitorIdx-th entry of MonitorFinder's cached
+// list, using WGC when asked for and DXGI otherwise. Returns false on failure.
+bool VideoCapturer::Open(int monitorIdx, Method method)
+{
+    Close();
+    // BUGFIX: guard against an out-of-range index into the cached list
+    // (previously undefined behavior).
+    auto&& monitors = MonitorFinder::GetList();
+    __CheckBool(monitorIdx >= 0 && monitorIdx < static_cast<int>(monitors.size()));
+    auto&& monitorInfo = monitors[monitorIdx];
+    m_rect = monitorInfo.rect;
+    // Monitors have no window frame.
+    m_borderHeight = 0;
+    m_borderWidth = 0;
+    m_width = m_rect.right - m_rect.left;
+    m_height = m_rect.bottom - m_rect.top;
+    m_usingMethod = method;
+    m_type = MONITOR;
+    switch (method) {
+    case WGC: {
+        auto monitor = monitorInfo.monitor;
+        m_wgcCapturer = WgcCapturer::New();
+        __CheckBool(m_wgcCapturer->StartCapturerMonitor(monitor, m_width, m_height));
+        break;
+    }
+
+    default: { // DXGI
+        m_dxgiCapturer = new DxgiCapturer;
+        __CheckBool(m_dxgiCapturer->Open(m_rect.left, m_rect.top, m_width, m_height));
+        break;
+    }
+    }
+    return true;
+}
+
+// Grabs one frame from the active backend. For DXGI/GDI the cursor is drawn
+// manually onto the capture DC first (when enabled); WGC composites the
+// cursor by itself.
+AVFrame* VideoCapturer::GetFrame()
+{
+    switch (m_usingMethod) {
+    case WGC:
+        return m_wgcCapturer->GetFrame();
+    case DXGI: {
+        HDC captureDc = m_dxgiCapturer->GetHdc();
+        if (m_isDrawCursor && captureDc) {
+            _DrawCursor(captureDc);
+        }
+        return m_dxgiCapturer->GetFrame();
+    }
+    default: { // GDI
+        HDC captureDc = m_gdiCapturer->GetHdc(m_borderWidth, m_borderHeight);
+        if (m_isDrawCursor && captureDc) {
+            _DrawCursor(captureDc);
+        }
+        return m_gdiCapturer->GetFrame();
+    }
+    }
+}
+
+// Enables/disables cursor drawing. WGC renders the cursor itself, so the
+// flag is forwarded to it; DXGI/GDI consult the flag in GetFrame().
+void VideoCapturer::SetDrawCursor(bool isDrawCursor)
+{
+    m_isDrawCursor = isDrawCursor;
+    if (m_usingMethod == WGC) {
+        m_wgcCapturer->SetDrawCursor(isDrawCursor);
+    }
+}
+
+// Shuts down whichever backend is active; Free() presumably nulls the
+// pointer after running the deleter, making Close() idempotent.
+void VideoCapturer::Close()
+{
+    Free(m_dxgiCapturer, [this] { m_dxgiCapturer->Close(); delete m_dxgiCapturer; });
+    Free(m_gdiCapturer, [this] { m_gdiCapturer->Close(); delete m_gdiCapturer; });
+    Free(m_wgcCapturer, [this] { m_wgcCapturer->Close(); });
+}
+
+// RAII: releases the active capture backend.
+VideoCapturer::~VideoCapturer()
+{
+    Close();
+}
+
+// Capture width in pixels (client area for windows, full rect for monitors).
+int VideoCapturer::GetWidth() const
+{
+    return m_width;
+}
+// Capture height in pixels.
+int VideoCapturer::GetHeight() const
+{
+    return m_height;
+}
+
+// Derives the capture size from the window's client rect, and the frame
+// (border) thickness from the difference to the full window rect.
+bool VideoCapturer::_GetHwndSize(HWND hwnd)
+{
+    RECT rect;
+    __CheckBool(GetClientRect(hwnd, &rect));
+    m_rect = rect;
+    m_width = rect.right - rect.left;
+    m_height = rect.bottom - rect.top;
+
+    __CheckBool(GetWindowRect(hwnd, &rect));
+    m_borderWidth = rect.right - rect.left - m_width;
+    m_borderHeight = rect.bottom - rect.top - m_height;
+    // Clamp: some windows report a client area no smaller than the window rect.
+    if (m_borderWidth < 0) {
+        m_borderWidth = 0;
+    }
+    if (m_borderHeight < 0) {
+        m_borderHeight = 0;
+    }
+    return true;
+}
+
+// Draws the current mouse cursor onto the capture DC at its position
+// relative to the captured rect (used by the DXGI and GDI paths).
+void VideoCapturer::_DrawCursor(HDC hdc)
+{
+    CURSORINFO ci;
+    ci.cbSize = sizeof(CURSORINFO);
+    __CheckNo(GetCursorInfo(&ci));
+
+    // BUGFIX: CURSORINFO::flags is a bit mask (CURSOR_SHOWING can be combined
+    // with CURSOR_SUPPRESSED), so test the bit rather than comparing equal.
+    if (!(ci.flags & CURSOR_SHOWING)) {
+        return;
+    }
+
+    int cursorX = ci.ptScreenPos.x;
+    int cursorY = ci.ptScreenPos.y;
+    if (cursorX > m_rect.right || cursorX < m_rect.left
+        || cursorY > m_rect.bottom || cursorY < m_rect.top) {
+        return; // cursor is outside the captured region
+    }
+
+    // TODO(review): DrawIconEx positions the icon's top-left corner; for
+    // pixel-accurate placement the cursor hotspot (GetIconInfo) should be
+    // subtracted from the draw position.
+    int x = cursorX - m_rect.left;
+    int y = cursorY - m_rect.top;
+    __CheckNo(DrawIconEx(hdc, x, y, ci.hCursor, 0, 0, 0, NULL, DI_NORMAL | DI_COMPAT));
+}

+ 46 - 0
AvRecorder/test/capturer/video_capturer.h

@@ -0,0 +1,46 @@
+#ifndef __AV_CAPTURER_H__
+#define __AV_CAPTURER_H__
+
+#include "dxgi_capturer.h"
+#include "gdi_capturer.h"
+#include "wgc_capturer.h"
+
+// Front-end that captures a window (WGC/GDI) or a monitor (WGC/DXGI) and
+// yields AVFrames. Choose the target and backend via Open(), then pull
+// frames with GetFrame().
+class VideoCapturer {
+public:
+    enum Method {
+        GDI,
+        DXGI,
+        WGC
+    };
+
+    enum Type {
+        WINDOW,
+        MONITOR
+    };
+    ~VideoCapturer();
+    // Window capture: WGC when requested, otherwise GDI.
+    bool Open(HWND hwnd, Method method);
+    // Monitor capture: WGC when requested, otherwise DXGI.
+    bool Open(int monitorIdx, Method method);
+    AVFrame* GetFrame();
+    void SetDrawCursor(bool isDrawCursor);
+    void Close();
+    int GetWidth() const;
+    int GetHeight() const;
+    Method GetMethod() const { return m_usingMethod; }
+
+private:
+    bool _GetHwndSize(HWND hwnd);
+    void _DrawCursor(HDC hdc);
+    Method m_usingMethod = WGC;
+    // BUGFIX: zero-initialize — every other member has an initializer and
+    // m_rect was read (by _DrawCursor) with indeterminate values if an
+    // Open() overload failed early.
+    RECT m_rect = {};
+    Type m_type = MONITOR;
+    DxgiCapturer* m_dxgiCapturer = nullptr;
+    GdiCapturer* m_gdiCapturer = nullptr;
+    WgcCapturer* m_wgcCapturer = nullptr;
+    int m_width = 0;
+    int m_height = 0;
+    int m_borderHeight = 0; // window frame height (0 for monitors)
+    int m_borderWidth = 0;  // window frame width (0 for monitors)
+    HWND m_srcHwnd = nullptr;
+    bool m_isDrawCursor = true;
+};
+#endif

+ 88 - 0
AvRecorder/test/capturer/wgc/App.cpp

@@ -0,0 +1,88 @@
+#include "App.h"
+// D3D
+#include <d2d1_3.h>
+#include <d3d11_4.h>
+#include <dxgi1_6.h>
+#include <wincodec.h>
+
+#include "pch.h"
+
+#include "basic/frame.h"
+
+using namespace winrt;
+using namespace Windows::System;
+using namespace Windows::Foundation;
+using namespace Windows::UI;
+using namespace Windows::UI::Composition;
+using namespace Windows::Graphics::Capture;
+
+// Builds the composition visual tree (root container + sprite fed by a
+// surface brush) that previews the capture, and creates the WinRT
+// Direct3D device the capture session will render with.
+void App::Initialize(ContainerVisual const& root)
+{
+    auto queue = DispatcherQueue::GetForCurrentThread();
+
+    m_compositor = root.Compositor();
+    m_root = m_compositor.CreateContainerVisual();
+    m_content = m_compositor.CreateSpriteVisual();
+    m_brush = m_compositor.CreateSurfaceBrush();
+
+    m_root.RelativeSizeAdjustment({1, 1});
+    root.Children().InsertAtTop(m_root);
+
+    // Center the content and inset it 80px on each axis (negative size
+    // offsets relative sizing).
+    m_content.AnchorPoint({0.5f, 0.5f});
+    m_content.RelativeOffsetAdjustment({0.5f, 0.5f, 0});
+    m_content.RelativeSizeAdjustment({1, 1});
+    m_content.Size({-80, -80});
+    m_content.Brush(m_brush);
+    m_brush.HorizontalAlignmentRatio(0.5f);
+    m_brush.VerticalAlignmentRatio(0.5f);
+    m_brush.Stretch(CompositionStretch::Uniform);
+    auto shadow = m_compositor.CreateDropShadow();
+    shadow.Mask(m_brush);
+    m_content.Shadow(shadow);
+    m_root.Children().InsertAtTop(m_content);
+
+    // Wrap a D3D11 device as a WinRT IDirect3DDevice for the capture APIs.
+    auto d3dDevice = CreateD3DDevice();
+    auto dxgiDevice = d3dDevice.as<IDXGIDevice>();
+    m_device = CreateDirect3DDevice(dxgiDevice.get());
+}
+
+// Tears down the active capture session, if any.
+void App::Close()
+{
+    if (m_capture == nullptr) {
+        return;
+    }
+    m_capture->Close();
+    delete m_capture;
+    m_capture = nullptr;
+}
+
+// Begins WGC capture of a single window, wiring the capture surface into
+// the preview brush. Returns false when the capture item cannot be created.
+bool App::StartCaptureWindow(HWND hwnd, int width, int height)
+{
+    Close();
+    auto item = CreateCaptureItemForWindow(hwnd);
+    __CheckBool(item);
+    m_capture = new SimpleCapture(m_device, item, width, height);
+    m_brush.Surface(m_capture->CreateSurface(m_compositor));
+    m_capture->StartCapture();
+    return true;
+}
+
+// Forwards the cursor-capture flag to the active session; no-op when no
+// capture is running.
+void App::SetDrawCursor(bool isDrawCursor)
+{
+    if (m_capture != nullptr) {
+        m_capture->SetDrawCursor(isDrawCursor);
+    }
+}
+
+// Begins WGC capture of an entire monitor; otherwise identical to
+// StartCaptureWindow.
+bool App::StartCaptureMonitor(HMONITOR monitor, int width, int height)
+{
+    Close();
+    auto item = CreateCaptureItemForMonitor(monitor);
+    __CheckBool(item);
+    m_capture = new SimpleCapture(m_device, item, width, height);
+    m_brush.Surface(m_capture->CreateSurface(m_compositor));
+    m_capture->StartCapture();
+    return true;
+}

+ 46 - 0
AvRecorder/test/capturer/wgc/App.h

@@ -0,0 +1,46 @@
+#pragma once
+
+#include <guiddef.h>
+// WinRT
+
+#include <winrt/Windows.Foundation.Numerics.h>
+#include <winrt/Windows.Foundation.h>
+#include <winrt/Windows.Graphics.Capture.h>
+#include <winrt/Windows.Graphics.DirectX.Direct3d11.h>
+#include <winrt/Windows.Graphics.DirectX.h>
+#include <winrt/Windows.Graphics.Imaging.h>
+#include <winrt/Windows.Storage.Streams.h>
+#include <winrt/Windows.Storage.h>
+#include <winrt/Windows.System.h>
+#include <winrt/Windows.UI.Composition.Desktop.h>
+#include <winrt/Windows.UI.Composition.h>
+#include <winrt/Windows.UI.Popups.h>
+#include <winrt/Windows.UI.h>
+
+#include <winrt/Windows.Foundation.Collections.h>
+
+#include "SimpleCapture.h"
+
+// Owns the Windows.Graphics.Capture session plus the composition visuals
+// used to preview it. Call Initialize() once, then StartCaptureWindow /
+// StartCaptureMonitor, and pull frames with GetFrame().
+class App {
+public:
+    App() { }
+    ~App() { }
+
+    void Initialize(
+        winrt::Windows::UI::Composition::ContainerVisual const& root);
+
+    bool StartCaptureWindow(HWND hwnd, int width, int height);
+    bool StartCaptureMonitor(HMONITOR monitor, int width, int height);
+    void SetDrawCursor(bool isDrawCursor);
+    void Close();
+    // BUGFIX: guard against a null session — previously this dereferenced
+    // m_capture when called before any StartCapture*() / after Close().
+    AVFrame* GetFrame() { return m_capture ? m_capture->GetFrame() : nullptr; }
+
+private:
+    winrt::Windows::UI::Composition::Compositor m_compositor {nullptr};
+    winrt::Windows::UI::Composition::ContainerVisual m_root {nullptr};
+    winrt::Windows::UI::Composition::SpriteVisual m_content {nullptr};
+    winrt::Windows::UI::Composition::CompositionSurfaceBrush m_brush {nullptr};
+
+    winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice m_device {nullptr};
+    SimpleCapture* m_capture = nullptr; // owned; deleted in Close()
+};

+ 175 - 0
AvRecorder/test/capturer/wgc/SimpleCapture.cpp

@@ -0,0 +1,175 @@
+
+// D3D
+#include <d3d11_4.h>
+#include <dxgi1_6.h>
+#include <d2d1_3.h>
+#include <wincodec.h>
+
+#include "pch.h"
+#include "SimpleCapture.h"
+#include "basic/basic.h"
+
+using namespace winrt;
+using namespace Windows;
+using namespace Windows::Foundation;
+using namespace Windows::System;
+using namespace Windows::Graphics;
+using namespace Windows::Graphics::Capture;
+using namespace Windows::Graphics::DirectX;
+using namespace Windows::Graphics::DirectX::Direct3D11;
+using namespace Windows::Foundation::Numerics;
+using namespace Windows::UI;
+using namespace Windows::UI::Composition;
+
+#undef min
+#undef max
+
+// Sets up a WGC frame pool + capture session for `item`, a swap chain for
+// on-screen preview, the GPU RGB->NV12 converter, and the cached output
+// frames (width x height).
+SimpleCapture::SimpleCapture(
+    IDirect3DDevice const& device,
+    GraphicsCaptureItem const& item,
+    int width, int height)
+{
+    m_item = item;
+    m_device = device;
+
+    // Set up
+    auto d3dDevice = GetDXGIInterfaceFromObject<ID3D11Device>(m_device);
+    d3dDevice->GetImmediateContext(m_d3dContext.put());
+    auto size = m_item.Size();
+
+    // Double-buffered BGRA swap chain sized to the capture item.
+    m_swapChain = CreateDXGISwapChain(
+        d3dDevice,
+        static_cast<uint32_t>(size.Width),
+        static_cast<uint32_t>(size.Height),
+        static_cast<DXGI_FORMAT>(DirectXPixelFormat::B8G8R8A8UIntNormalized),
+        2);
+
+    // Create framepool, define pixel format (DXGI_FORMAT_B8G8R8A8_UNORM), and frame size.
+    m_framePool = Direct3D11CaptureFramePool::Create(
+        m_device,
+        DirectXPixelFormat::B8G8R8A8UIntNormalized,
+        2,
+        size);
+
+    m_session = m_framePool.CreateCaptureSession(m_item);
+    m_lastSize = size;
+    // auto_revoke: the handler is detached automatically on destruction.
+    m_frameArrived = m_framePool.FrameArrived(auto_revoke, {this, &SimpleCapture::OnFrameArrived});
+
+    // Color spaces for RGB -> NV12: full-range RGB in, limited-range
+    // (16-235) YCbCr out.
+    D3D11_VIDEO_PROCESSOR_COLOR_SPACE inputColorSpace;
+    inputColorSpace.Usage = 1;
+    inputColorSpace.RGB_Range = 0;
+    inputColorSpace.YCbCr_Matrix = 1;
+    inputColorSpace.YCbCr_xvYCC = 0;
+    inputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_0_255;
+
+    D3D11_VIDEO_PROCESSOR_COLOR_SPACE outputColorSpace;
+    outputColorSpace.Usage = 0;
+    outputColorSpace.RGB_Range = 0;
+    outputColorSpace.YCbCr_Matrix = 1;
+    outputColorSpace.YCbCr_xvYCC = 0;
+    outputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_16_235;
+    m_rgbToNv12.Open(d3dDevice.get(), m_d3dContext.get(), inputColorSpace, outputColorSpace);
+    m_nv12Frame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12, width, height);
+    m_xrgbFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_BGR0, width, height);
+    __CheckNo(m_nv12Frame);
+    __CheckNo(m_xrgbFrame);
+    m_isCapture = true;
+    // m_cnt counts warm-up frames processed even across a resize — presumably
+    // to settle the initial size; TODO confirm the intent of the value 5.
+    m_cnt = 5;
+}
+
+// Start sending capture frames; throws if the session was already closed.
+void SimpleCapture::StartCapture()
+{
+    CheckClosed();
+    m_session.StartCapture();
+}
+
+// Wraps the preview swap chain as a composition surface for a brush.
+ICompositionSurface SimpleCapture::CreateSurface(
+    Compositor const& compositor)
+{
+    CheckClosed();
+    return CreateCompositionSurfaceForSwapChain(compositor, m_swapChain.get());
+}
+
+// Stops the capture session and releases buffers/frames. The
+// compare_exchange guarantees the WinRT objects are torn down exactly once;
+// the buffer/frame cleanup below runs on every call (Free presumably nulls
+// the pointers, so repeated calls are harmless).
+void SimpleCapture::Close()
+{
+    auto expected = false;
+    if (m_closed.compare_exchange_strong(expected, true)) {
+        m_frameArrived.revoke();
+        m_framePool.Close();
+        m_session.Close();
+        m_swapChain = nullptr;
+        m_framePool = nullptr;
+        m_session = nullptr;
+        m_item = nullptr;
+    }
+    m_nv12Buffers.Clear();
+    m_xrgbBuffers.Clear();
+    m_rgbToNv12.Close();
+    Free(m_nv12Frame, [this] { av_frame_free(&m_nv12Frame); });
+    Free(m_xrgbFrame, [this] { av_frame_free(&m_xrgbFrame); });
+}
+
+// FrameArrived handler: converts each captured surface into the cached
+// NV12 (preferred) or BGR0 AVFrame, and recreates the frame pool when the
+// captured content changes size.
+void SimpleCapture::OnFrameArrived(
+    Direct3D11CaptureFramePool const& sender,
+    winrt::Windows::Foundation::IInspectable const&)
+{
+    auto newSize = false;
+    auto frame = sender.TryGetNextFrame();
+    auto frameContentSize = frame.ContentSize();
+    if (frameContentSize.Width != m_lastSize.Width || frameContentSize.Height != m_lastSize.Height) {
+        // The thing we have been capturing has changed size.
+        // We need to resize our swap chain first, then blit the pixels.
+        // After we do that, retire the frame and then recreate our frame pool.
+        newSize = true;
+        m_lastSize = frameContentSize;
+        m_swapChain->ResizeBuffers(
+            2,
+            static_cast<uint32_t>(m_lastSize.Width),
+            static_cast<uint32_t>(m_lastSize.Height),
+            static_cast<DXGI_FORMAT>(DirectXPixelFormat::B8G8R8A8UIntNormalized),
+            0);
+        // Cached GPU buffers are sized to the old dimensions — drop them.
+        m_nv12Buffers.Clear();
+        m_xrgbBuffers.Clear();
+    }
+    // m_cnt: remaining warm-up frames that are converted unconditionally.
+    if (m_cnt > 0) {
+        --m_cnt;
+    }
+    // Stop converting after a resize, unless still warming up — presumably
+    // the consumer re-opens the capture at the new size; TODO confirm.
+    m_isCapture = (m_isCapture && !newSize) || m_cnt > 0;
+    if (m_isCapture) {
+        auto frameSurface = GetDXGIInterfaceFromObject<ID3D11Texture2D>(frame.Surface());
+        D3D11_TEXTURE2D_DESC desc;
+        frameSurface->GetDesc(&desc);
+        auto d3dDevice = GetDXGIInterfaceFromObject<ID3D11Device>(m_device);
+
+        // First try to produce an NV12 frame (GPU color conversion).
+        auto tmpFormat = desc.Format;
+        desc.Format = DXGI_FORMAT_NV12;
+        if (GenNv12Frame(d3dDevice.get(), m_d3dContext.get(), desc, frameSurface.get(),
+                m_nv12Buffers, m_nv12Frame, m_rgbToNv12)) {
+            m_pixType = _PixType::NV12;
+        } else {
+            // Fall back to raw BGR0.
+            desc.Format = tmpFormat;
+            GenRgbFrame(d3dDevice.get(), m_d3dContext.get(), desc, frameSurface.get(),
+                m_xrgbBuffers, m_xrgbFrame);
+            m_pixType = _PixType::RGB;
+        }
+    }
+
+    // Preview-to-swap-chain path, currently disabled:
+    // com_ptr<ID3D11Texture2D> backBuffer;
+    // check_hresult(m_swapChain->GetBuffer(0, guid_of<ID3D11Texture2D>(), backBuffer.put_void()));
+    // m_d3dContext->CopyResource(backBuffer.get(), m_bufferFiller.GetMap());
+
+    // DXGI_PRESENT_PARAMETERS presentParameters = {0};
+    // auto hr = m_swapChain->Present1(1, 0, &presentParameters);
+
+    if (newSize) {
+        // Recreate the pool at the new dimensions for subsequent frames.
+        m_framePool.Recreate(
+            m_device,
+            DirectXPixelFormat::B8G8R8A8UIntNormalized,
+            2,
+            m_lastSize);
+    }
+}

+ 62 - 0
AvRecorder/test/capturer/wgc/SimpleCapture.h

@@ -0,0 +1,62 @@
+
+#pragma once
+
+#include <chrono>
+#include "d3d/gen_frame.h"
+
+// One Windows.Graphics.Capture session: receives frames from a frame pool,
+// converts them to NV12 (preferred) or BGR0 AVFrames, and exposes a
+// composition surface for live preview.
+class SimpleCapture {
+public:
+    SimpleCapture(
+        winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice const& device,
+        winrt::Windows::Graphics::Capture::GraphicsCaptureItem const& item,
+        int width, int height);
+    ~SimpleCapture() { Close(); }
+
+    void StartCapture();
+    winrt::Windows::UI::Composition::ICompositionSurface CreateSurface(
+        winrt::Windows::UI::Composition::Compositor const& compositor);
+
+    void SetDrawCursor(bool isDrawCursor) { m_session.IsCursorCaptureEnabled(isDrawCursor); }
+
+    void Close();
+
+    // Most recently converted frame; owned by this object.
+    AVFrame* GetFrame() const noexcept { return m_pixType == NV12 ? m_nv12Frame : m_xrgbFrame; }
+
+private:
+    void OnFrameArrived(
+        winrt::Windows::Graphics::Capture::Direct3D11CaptureFramePool const& sender,
+        winrt::Windows::Foundation::IInspectable const& args);
+
+    void CheckClosed()
+    {
+        if (m_closed.load() == true) {
+            throw winrt::hresult_error(RO_E_CLOSED);
+        }
+    }
+
+private:
+    enum _PixType {
+        NV12,
+        RGB
+    };
+
+    winrt::Windows::Graphics::Capture::GraphicsCaptureItem m_item {nullptr};
+    winrt::Windows::Graphics::Capture::Direct3D11CaptureFramePool m_framePool {nullptr};
+    winrt::Windows::Graphics::Capture::GraphicsCaptureSession m_session {nullptr};
+    winrt::Windows::Graphics::SizeInt32 m_lastSize;
+
+    winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice m_device {nullptr};
+    winrt::com_ptr<IDXGISwapChain1> m_swapChain {nullptr};
+    winrt::com_ptr<ID3D11DeviceContext> m_d3dContext {nullptr};
+
+    std::atomic<bool> m_closed = false;
+    winrt::Windows::Graphics::Capture::Direct3D11CaptureFramePool::FrameArrived_revoker m_frameArrived;
+    AVFrame* m_xrgbFrame = nullptr; // BGR0 fallback output
+    AVFrame* m_nv12Frame = nullptr; // NV12 preferred output
+    BufferFiller m_xrgbBuffers;
+    BufferFiller m_nv12Buffers;
+    D3dConverter m_rgbToNv12;
+    // BUGFIX: was uninitialized — GetFrame() read it before the first
+    // OnFrameArrived; default to the always-allocated BGR0 frame.
+    _PixType m_pixType = RGB;
+    bool m_isCapture = true;
+    int m_cnt = 5; // warm-up frame counter (see OnFrameArrived)
+};

+ 24 - 0
AvRecorder/test/capturer/wgc/capture.interop.h

@@ -0,0 +1,24 @@
+#pragma once
+#include <guiddef.h>
+
+#include <winrt/Windows.Graphics.Capture.h>
+#include <windows.graphics.capture.interop.h>
+#include <windows.graphics.capture.h>
+
+// Wraps a raw HWND in a WinRT GraphicsCaptureItem via the
+// IGraphicsCaptureItemInterop COM interface. Throws winrt::hresult_error on
+// failure (e.g. the window is not capturable).
+inline auto CreateCaptureItemForWindow(HWND hwnd)
+{
+    auto activation_factory = winrt::get_activation_factory<winrt::Windows::Graphics::Capture::GraphicsCaptureItem>();
+    auto interop_factory = activation_factory.as<IGraphicsCaptureItemInterop>();
+    winrt::Windows::Graphics::Capture::GraphicsCaptureItem item = {nullptr};
+    // Fix: the HRESULT was previously discarded, so a failed CreateForWindow
+    // silently returned a null item that crashed later in the capture path.
+    winrt::check_hresult(interop_factory->CreateForWindow(hwnd, winrt::guid_of<ABI::Windows::Graphics::Capture::IGraphicsCaptureItem>(), reinterpret_cast<void**>(winrt::put_abi(item))));
+    return item;
+}
+
+// Wraps an HMONITOR in a WinRT GraphicsCaptureItem via the
+// IGraphicsCaptureItemInterop COM interface. Throws winrt::hresult_error on
+// failure.
+inline auto CreateCaptureItemForMonitor(HMONITOR monitor)
+{
+    auto activation_factory = winrt::get_activation_factory<winrt::Windows::Graphics::Capture::GraphicsCaptureItem>();
+    auto interop_factory = activation_factory.as<IGraphicsCaptureItemInterop>();
+    winrt::Windows::Graphics::Capture::GraphicsCaptureItem item = {nullptr};
+    // Fix: check the HRESULT instead of silently returning a null item when
+    // CreateForMonitor fails.
+    winrt::check_hresult(interop_factory->CreateForMonitor(monitor, winrt::guid_of<ABI::Windows::Graphics::Capture::IGraphicsCaptureItem>(), reinterpret_cast<void**>(winrt::put_abi(item))));
+    return item;
+}

+ 61 - 0
AvRecorder/test/capturer/wgc/composition.interop.h

@@ -0,0 +1,61 @@
+#pragma once
+#include <guiddef.h>
+
+#include <winrt/Windows.UI.Composition.h>
+#include <windows.ui.composition.interop.h>
+#include <d2d1_1.h>
+
+// Wraps a D3D/DXGI device (passed as IUnknown) in a WinRT
+// CompositionGraphicsDevice by dropping down to the ICompositorInterop ABI.
+inline auto CreateCompositionGraphicsDevice(
+    winrt::Windows::UI::Composition::Compositor const& compositor,
+    ::IUnknown* device)
+{
+    winrt::Windows::UI::Composition::CompositionGraphicsDevice graphicsDevice{ nullptr };
+    auto compositorInterop = compositor.as<ABI::Windows::UI::Composition::ICompositorInterop>();
+    winrt::com_ptr<ABI::Windows::UI::Composition::ICompositionGraphicsDevice> graphicsInterop;
+    winrt::check_hresult(compositorInterop->CreateGraphicsDevice(device, graphicsInterop.put()));
+    // QI the ABI-level interface back into the C++/WinRT projected type.
+    winrt::check_hresult(graphicsInterop->QueryInterface(winrt::guid_of<winrt::Windows::UI::Composition::CompositionGraphicsDevice>(),
+        reinterpret_cast<void**>(winrt::put_abi(graphicsDevice))));
+    return graphicsDevice;
+}
+
+// Resizes a composition drawing surface. Fractional sizes are rounded to
+// whole pixels because the interop Resize API takes an integer SIZE.
+inline void ResizeSurface(
+    winrt::Windows::UI::Composition::CompositionDrawingSurface const& surface,
+    winrt::Windows::Foundation::Size const& size)
+{
+    auto surfaceInterop = surface.as<ABI::Windows::UI::Composition::ICompositionDrawingSurfaceInterop>();
+    SIZE newSize = {};
+    newSize.cx = static_cast<LONG>(std::round(size.Width));
+    newSize.cy = static_cast<LONG>(std::round(size.Height));
+    winrt::check_hresult(surfaceInterop->Resize(newSize));
+}
+
+// Begins a draw on a composition surface and returns a D2D device context
+// already translated by the surface's update offset. The caller MUST pair
+// this with SurfaceEndDraw (see the SurfaceContext RAII wrapper).
+inline auto SurfaceBeginDraw(
+    winrt::Windows::UI::Composition::CompositionDrawingSurface const& surface)
+{
+    auto surfaceInterop = surface.as<ABI::Windows::UI::Composition::ICompositionDrawingSurfaceInterop>();
+    winrt::com_ptr<ID2D1DeviceContext> context;
+    POINT offset = {};
+    winrt::check_hresult(surfaceInterop->BeginDraw(nullptr, __uuidof(ID2D1DeviceContext), context.put_void(), &offset));
+    // BeginDraw may hand back a sub-rectangle of a larger atlas; translate so
+    // the caller can draw at (0,0).
+    context->SetTransform(D2D1::Matrix3x2F::Translation((FLOAT)offset.x,(FLOAT) offset.y));
+    return context;
+}
+
+// Ends a draw previously started with SurfaceBeginDraw.
+inline void SurfaceEndDraw(
+    winrt::Windows::UI::Composition::CompositionDrawingSurface const& surface)
+{
+    auto surfaceInterop = surface.as<ABI::Windows::UI::Composition::ICompositionDrawingSurfaceInterop>();
+    winrt::check_hresult(surfaceInterop->EndDraw());
+}
+
+// Exposes a DXGI swap chain (passed as IUnknown) as an ICompositionSurface so
+// it can be attached to the composition visual tree.
+inline auto CreateCompositionSurfaceForSwapChain(
+    winrt::Windows::UI::Composition::Compositor const& compositor,
+    ::IUnknown* swapChain)
+{
+    winrt::Windows::UI::Composition::ICompositionSurface surface{ nullptr };
+    auto compositorInterop = compositor.as<ABI::Windows::UI::Composition::ICompositorInterop>();
+    winrt::com_ptr<ABI::Windows::UI::Composition::ICompositionSurface> surfaceInterop;
+    winrt::check_hresult(compositorInterop->CreateCompositionSurfaceForSwapChain(swapChain, surfaceInterop.put()));
+    // QI the ABI surface back into the projected ICompositionSurface.
+    winrt::check_hresult(surfaceInterop->QueryInterface(winrt::guid_of<winrt::Windows::UI::Composition::ICompositionSurface>(),
+        reinterpret_cast<void**>(winrt::put_abi(surface))));
+    return surface;
+}

+ 173 - 0
AvRecorder/test/capturer/wgc/d3dHelpers.h

@@ -0,0 +1,173 @@
+#pragma once
+
+#include "composition.interop.h"
+
+// RAII wrapper pairing SurfaceBeginDraw/SurfaceEndDraw around a composition
+// drawing surface.
+struct SurfaceContext
+{
+public:
+    SurfaceContext(std::nullptr_t) {}
+    SurfaceContext(
+        winrt::Windows::UI::Composition::CompositionDrawingSurface surface)
+    {
+        m_surface = surface;
+        m_d2dContext = SurfaceBeginDraw(m_surface);
+    }
+    ~SurfaceContext()
+    {
+        // Fix: the nullptr_t constructor leaves m_surface null; calling
+        // SurfaceEndDraw on a null surface crashed in the interop QI. Only
+        // end a draw that was actually begun.
+        if (m_surface != nullptr) {
+            SurfaceEndDraw(m_surface);
+        }
+        m_d2dContext = nullptr;
+        m_surface = nullptr;
+    }
+
+    // Device context valid between construction and destruction.
+    winrt::com_ptr<ID2D1DeviceContext> GetDeviceContext() { return m_d2dContext; }
+
+private:
+    winrt::com_ptr<ID2D1DeviceContext> m_d2dContext;
+    winrt::Windows::UI::Composition::CompositionDrawingSurface m_surface{ nullptr };
+};
+
+// RAII guard around ID3D11Multithread::Enter/Leave for cross-thread use of a
+// D3D11 device.
+struct D3D11DeviceLock
+{
+public:
+    D3D11DeviceLock(std::nullopt_t) {}
+    D3D11DeviceLock(ID3D11Multithread* pMultithread)
+    {
+        m_multithread.copy_from(pMultithread);
+        m_multithread->Enter();
+    }
+    ~D3D11DeviceLock()
+    {
+        // Fix: the nullopt_t constructor leaves m_multithread null, so the
+        // unconditional Leave() dereferenced a null pointer.
+        if (m_multithread) {
+            m_multithread->Leave();
+            m_multithread = nullptr;
+        }
+    }
+private:
+    winrt::com_ptr<ID3D11Multithread> m_multithread;
+};
+
+// Creates a WIC imaging factory (v2, required for D2D interop).
+inline auto
+CreateWICFactory()
+{
+    winrt::com_ptr<IWICImagingFactory2> wicFactory;
+    winrt::check_hresult(
+        ::CoCreateInstance(
+            CLSID_WICImagingFactory,
+            nullptr,
+            CLSCTX_INPROC_SERVER,
+            // Fix: the IID must match the pointer type being filled in.
+            // Requesting IWICImagingFactory while storing the result as
+            // IWICImagingFactory2 was a type-confusion bug.
+            winrt::guid_of<IWICImagingFactory2>(),
+            wicFactory.put_void()));
+
+    return wicFactory;
+}
+
+// Creates a D2D device on top of an existing D3D11 device (via its DXGI
+// interface).
+inline auto
+CreateD2DDevice(
+    winrt::com_ptr<ID2D1Factory1> const& factory,
+    winrt::com_ptr<ID3D11Device> const& device)
+{
+    winrt::com_ptr<ID2D1Device> result;
+    winrt::check_hresult(factory->CreateDevice(device.as<IDXGIDevice>().get(), result.put()));
+    return result;
+}
+
+// Creates a D3D11 device of the given driver type into `device` and returns
+// the raw HRESULT (the caller decides whether to fall back, see the
+// parameterless overload below).
+inline auto
+CreateD3DDevice(
+    D3D_DRIVER_TYPE const type,
+    winrt::com_ptr<ID3D11Device>& device)
+{
+    WINRT_ASSERT(!device);
+
+    // BGRA support is required for Direct2D interop.
+    UINT flags = D3D11_CREATE_DEVICE_BGRA_SUPPORT;
+
+// Debug layer intentionally disabled; re-enable locally when diagnosing D3D issues.
+//#ifdef _DEBUG
+//	flags |= D3D11_CREATE_DEVICE_DEBUG;
+//#endif
+
+    return D3D11CreateDevice(
+        nullptr,
+        type,
+        nullptr,
+        flags,
+        nullptr, 0,
+        D3D11_SDK_VERSION,
+        device.put(),
+        nullptr,
+        nullptr);
+}
+
+// Creates a hardware D3D11 device, falling back to the WARP software
+// rasterizer when hardware is unsupported. Throws on failure.
+inline auto
+CreateD3DDevice()
+{
+    winrt::com_ptr<ID3D11Device> device;
+    HRESULT hr = CreateD3DDevice(D3D_DRIVER_TYPE_HARDWARE, device);
+
+    if (DXGI_ERROR_UNSUPPORTED == hr)
+    {
+        hr = CreateD3DDevice(D3D_DRIVER_TYPE_WARP, device);
+    }
+
+    winrt::check_hresult(hr);
+    return device;
+}
+
+// Creates a single-threaded D2D factory. Throws on failure.
+inline auto
+CreateD2DFactory()
+{
+    D2D1_FACTORY_OPTIONS options{};
+
+// Debug layer intentionally disabled; re-enable locally when diagnosing D2D issues.
+//#ifdef _DEBUG
+//	options.debugLevel = D2D1_DEBUG_LEVEL_INFORMATION;
+//#endif
+
+    winrt::com_ptr<ID2D1Factory1> factory;
+
+    winrt::check_hresult(D2D1CreateFactory(
+        D2D1_FACTORY_TYPE_SINGLE_THREADED,
+        options,
+        factory.put()));
+
+    return factory;
+}
+
+// Creates a composition swap chain from an explicit descriptor, walking from
+// the D3D device up to its DXGI factory. Throws on failure.
+inline auto
+CreateDXGISwapChain(
+    winrt::com_ptr<ID3D11Device> const& device,
+    const DXGI_SWAP_CHAIN_DESC1* desc)
+{
+    auto dxgiDevice = device.as<IDXGIDevice2>();
+    winrt::com_ptr<IDXGIAdapter> adapter;
+    winrt::check_hresult(dxgiDevice->GetParent(winrt::guid_of<IDXGIAdapter>(), adapter.put_void()));
+    winrt::com_ptr<IDXGIFactory2> factory;
+    winrt::check_hresult(adapter->GetParent(winrt::guid_of<IDXGIFactory2>(), factory.put_void()));
+
+    winrt::com_ptr<IDXGISwapChain1> swapchain;
+    winrt::check_hresult(factory->CreateSwapChainForComposition(
+        device.get(),
+        desc,
+        nullptr,
+        swapchain.put()));
+
+    return swapchain;
+}
+
+// Convenience overload: builds a flip-sequential, premultiplied-alpha
+// composition swap-chain descriptor and delegates to the overload above.
+inline auto
+CreateDXGISwapChain(
+    winrt::com_ptr<ID3D11Device> const& device,
+    uint32_t width,
+    uint32_t height,
+    DXGI_FORMAT format,
+    uint32_t bufferCount)
+{
+    DXGI_SWAP_CHAIN_DESC1 desc = {};
+    desc.Width = width;
+    desc.Height = height;
+    desc.Format = format;
+    desc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
+    desc.SampleDesc.Count = 1;
+    desc.SampleDesc.Quality = 0;
+    desc.BufferCount = bufferCount;
+    desc.Scaling = DXGI_SCALING_STRETCH;
+    desc.SwapEffect = DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL;
+    desc.AlphaMode = DXGI_ALPHA_MODE_PREMULTIPLIED;
+
+    return CreateDXGISwapChain(device, &desc);
+}

+ 40 - 0
AvRecorder/test/capturer/wgc/direct3d11.interop.h

@@ -0,0 +1,40 @@
+#pragma once
+#include <winrt/windows.graphics.directx.direct3d11.h>
+
+// Forward declarations of the OS-provided Direct3D/WinRT interop exports
+// (not declared in the regular SDK headers — presumably exported by
+// d3d11.dll; TODO confirm the link dependency).
+extern "C"
+{
+    HRESULT __stdcall CreateDirect3D11DeviceFromDXGIDevice(::IDXGIDevice* dxgiDevice,
+        ::IInspectable** graphicsDevice);
+
+    HRESULT __stdcall CreateDirect3D11SurfaceFromDXGISurface(::IDXGISurface* dgxiSurface,
+        ::IInspectable** graphicsSurface);
+}
+
+// Interop accessor implemented by WinRT Direct3D wrapper objects;
+// GetInterface returns the underlying DXGI/D3D interface identified by `id`.
+struct __declspec(uuid("A9B3D012-3DF2-4EE3-B8D1-8695F457D3C1"))
+    IDirect3DDxgiInterfaceAccess : ::IUnknown
+{
+    virtual HRESULT __stdcall GetInterface(GUID const& id, void** object) = 0;
+};
+
+// Wraps a DXGI device in a WinRT IDirect3DDevice. Throws on failure.
+inline auto CreateDirect3DDevice(IDXGIDevice* dxgi_device)
+{
+    winrt::com_ptr<::IInspectable> d3d_device;
+    winrt::check_hresult(CreateDirect3D11DeviceFromDXGIDevice(dxgi_device, d3d_device.put()));
+    return d3d_device.as<winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice>();
+}
+
+// Wraps a DXGI surface in a WinRT IDirect3DSurface. Throws on failure.
+inline auto CreateDirect3DSurface(IDXGISurface* dxgi_surface)
+{
+    winrt::com_ptr<::IInspectable> d3d_surface;
+    winrt::check_hresult(CreateDirect3D11SurfaceFromDXGISurface(dxgi_surface, d3d_surface.put()));
+    return d3d_surface.as<winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DSurface>();
+}
+
+// Extracts the native DXGI/D3D interface T (e.g. ID3D11Texture2D) hidden
+// inside a WinRT Direct3D wrapper object. Throws if the object does not
+// support the interop accessor or the requested interface.
+template <typename T>
+auto GetDXGIInterfaceFromObject(winrt::Windows::Foundation::IInspectable const& object)
+{
+    auto access = object.as<IDirect3DDxgiInterfaceAccess>();
+    winrt::com_ptr<T> result;
+    winrt::check_hresult(access->GetInterface(winrt::guid_of<T>(), result.put_void()));
+    return result;
+}

+ 9 - 0
AvRecorder/test/capturer/wgc/pch.cpp

@@ -0,0 +1,9 @@
+
+// Precompiled-header source file.
+// NOTE(review): by MSVC convention #include "pch.h" should be the FIRST
+// include in this file — translation units compiled with /Yu"pch.h" skip
+// everything preceding that include, which would make the D3D includes below
+// dead code. Confirm the build settings before relying on them here.
+// D3D
+#include <d3d11_4.h>
+#include <dxgi1_6.h>
+#include <d2d1_3.h>
+#include <wincodec.h>
+
+
+#include "pch.h"

+ 34 - 0
AvRecorder/test/capturer/wgc/pch.h

@@ -0,0 +1,34 @@
+#pragma once
+#include <Unknwn.h>
+#include <inspectable.h>
+
+// WinRT
+
+#include <winrt/Windows.Foundation.h>
+#include <winrt/Windows.Graphics.Capture.h>
+#include <winrt/Windows.Graphics.DirectX.Direct3d11.h>
+#include <winrt/Windows.Graphics.DirectX.h>
+#include <winrt/Windows.System.h>
+#include <winrt/Windows.UI.Composition.Desktop.h>
+#include <winrt/Windows.UI.Composition.h>
+#include <winrt/Windows.UI.Popups.h>
+#include <winrt/Windows.UI.h>
+
+#include <windows.ui.composition.interop.h>
+#include <DispatcherQueue.h>
+
+// STL
+#include <atomic>
+#include <memory>
+
+// D3D
+#include <d3d11_4.h>
+#include <dxgi1_6.h>
+#include <d2d1_3.h>
+#include <wincodec.h>
+
+// Helpers
+#include "composition.interop.h"
+#include "d3dHelpers.h"
+#include "direct3d11.interop.h"
+#include "capture.interop.h"

+ 32 - 0
AvRecorder/test/capturer/wgc/winrt.cpp

@@ -0,0 +1,32 @@
+#include "pch.h"
+
+#include <ShObjIdl.h>
+
+using namespace winrt;
+using namespace Windows::UI;
+using namespace Windows::UI::Composition;
+using namespace Windows::UI::Composition::Desktop;
+
+// Direct3D11CaptureFramePool requires a DispatcherQueue
+// Creates a DispatcherQueueController bound to the CURRENT (STA) thread; the
+// queue dispatches WGC frame-arrived callbacks. Throws on failure.
+winrt::Windows::System::DispatcherQueueController CreateDispatcherQueueController()
+{
+    namespace abi = ABI::Windows::System;
+
+    DispatcherQueueOptions options {
+        sizeof(DispatcherQueueOptions),
+        DQTYPE_THREAD_CURRENT,
+        DQTAT_COM_STA};
+
+    Windows::System::DispatcherQueueController controller {nullptr};
+    check_hresult(CreateDispatcherQueueController(options, reinterpret_cast<abi::IDispatcherQueueController**>(put_abi(controller))));
+    return controller;
+}
+
+// Attaches a composition target to a Win32 window (topmost = true) so
+// composition visuals can be hosted in it. Throws on failure.
+DesktopWindowTarget CreateDesktopWindowTarget(Compositor const& compositor, HWND window)
+{
+    namespace abi = ABI::Windows::UI::Composition::Desktop;
+    auto interop = compositor.as<abi::ICompositorDesktopInterop>();
+    DesktopWindowTarget target {nullptr};
+    check_hresult(interop->CreateDesktopWindowTarget(window, true, reinterpret_cast<abi::IDesktopWindowTarget**>(put_abi(target))));
+    return target;
+}

+ 24 - 0
AvRecorder/test/capturer/wgc/winrt.h

@@ -0,0 +1,24 @@
+#pragma once
+
+// WinRT
+#include <winrt/Windows.Foundation.h>
+#include <winrt/Windows.System.h>
+#include <winrt/Windows.UI.h>
+#include <winrt/Windows.UI.Composition.h>
+#include <winrt/Windows.UI.Composition.Desktop.h>
+#include <winrt/Windows.UI.Popups.h>
+#include <winrt/Windows.Graphics.Capture.h>
+#include <winrt/Windows.Graphics.DirectX.h>
+#include <winrt/Windows.Graphics.DirectX.Direct3d11.h>
+
+#include <windows.ui.composition.interop.h>
+#include <DispatcherQueue.h>
+
+// NOTE(review): `using namespace` directives in a header leak into every
+// includer (wgc_capturer.cpp relies on this for unqualified names such as
+// Compositor). Consider fully qualifying the declarations below and removing
+// these directives in a follow-up.
+using namespace winrt;
+using namespace Windows::UI;
+using namespace Windows::UI::Composition;
+using namespace Windows::UI::Composition::Desktop;
+
+// Direct3D11CaptureFramePool requires a DispatcherQueue
+winrt::Windows::System::DispatcherQueueController CreateDispatcherQueueController();
+DesktopWindowTarget CreateDesktopWindowTarget(Compositor const& compositor, HWND window);

+ 96 - 0
AvRecorder/test/capturer/wgc_capturer.cpp

@@ -0,0 +1,96 @@
+#include "wgc_capturer.h"
+
+#include "wgc/winrt.h"
+
+#include <QWidget>
+
+// Shared dispatcher queue / visual-tree root; both point at function-local
+// statics created in Init() and stay valid for the rest of the process.
+winrt::Windows::System::DispatcherQueue* WgcCapturer::queuePtr = nullptr;
+winrt::Windows::UI::Composition::ContainerVisual* WgcCapturer::rootPtr = nullptr;
+std::list<WgcCapturer*> WgcCapturer::_capturers;
+// Hidden Qt window hosting the DesktopWindowTarget required by WGC.
+// NOTE(review): identifiers starting with two underscores are reserved.
+QWidget* __widget = nullptr;
+
+// One-time process-wide setup: COM apartment, dispatcher queue, composition
+// tree hosted in a hidden QWidget. Idempotent — returns early once queuePtr
+// is set. Must be called on the (STA/UI) thread that will pump the queue.
+void WgcCapturer::Init()
+{
+    if (queuePtr != nullptr) {
+        return;
+    }
+    // Init COM
+    init_apartment(apartment_type::single_threaded);
+    // Create a DispatcherQueue for our thread
+    // (function-local statics keep these WinRT objects alive for the process;
+    // queuePtr/rootPtr below point into them).
+    static auto controller = CreateDispatcherQueueController();
+    // Initialize Composition
+    static auto compositor = Compositor();
+    __widget = new QWidget;
+    // __widget->resize(800, 600);
+    // __widget->show();
+    static auto target = CreateDesktopWindowTarget(compositor, (HWND)__widget->winId());
+    static auto root = compositor.CreateContainerVisual();
+    root.RelativeSizeAdjustment({1.0f, 1.0f});
+    target.Root(root);
+
+    // Enqueue our capture work on the dispatcher
+    static auto queue = controller.DispatcherQueue();
+    queuePtr = &queue;
+    rootPtr = &root;
+    // Pre-create one capturer so the first caller of New() gets one immediately.
+    New();
+}
+
+// Tears down what Init() created: the hidden host window and every capturer
+// still owned by the pool (New() never removes entries; see Delete()).
+void WgcCapturer::Uninit()
+{
+    delete __widget;
+    __widget = nullptr; // Fix: avoid a dangling pointer after shutdown.
+    while (!_capturers.empty()) {
+        delete _capturers.front();
+        _capturers.pop_front();
+    }
+}
+
+// Hands out the capturer prepared in advance and immediately prepares a fresh
+// one for the next caller (matching the pre-create done in Init()). Ownership
+// stays in _capturers; everything is destroyed in Uninit().
+WgcCapturer* WgcCapturer::New()
+{
+    if (_capturers.empty()) {
+        _capturers.push_back(new WgcCapturer);
+    }
+    WgcCapturer* ready = _capturers.back();
+    // Fix: the "prepare a new one" step described by the original comment was
+    // missing, so every call returned the SAME instance and callers ended up
+    // sharing one capture session.
+    _capturers.push_back(new WgcCapturer);
+    return ready;
+}
+
+// Intentionally a no-op: capturers stay owned by _capturers until Uninit()
+// destroys them. The eager-delete implementation below was disabled,
+// presumably because the App teardown must happen on the dispatcher thread —
+// TODO confirm before re-enabling.
+void WgcCapturer::Delete(WgcCapturer* ptr)
+{
+    // auto iter = std::find(_capturers.begin(), _capturers.end(), ptr);
+    // if (iter == _capturers.end()) {
+    //     return;
+    // }
+    // if (*iter != nullptr) {
+    //     delete *iter;
+    // }
+    // _capturers.erase(iter);
+}
+
+// Allocates the App and schedules its Initialize() on the dispatcher thread;
+// _isAppInit flips to true once the queued task has run.
+// NOTE(review): the lambda captures `this` ([=]); if this capturer is deleted
+// before the dispatcher runs the task, the callback dereferences a dangling
+// pointer — confirm the lifetime guarantees.
+WgcCapturer::WgcCapturer()
+{
+    _app = new App;
+    _isAppInit = false;
+    auto success = queuePtr->TryEnqueue([=]() -> void {
+        _app->Initialize(*rootPtr);
+        _isAppInit = true;
+    });
+    WINRT_VERIFY(success);
+}
+
+// Releases the owned App. `delete` on a null pointer is a no-op, so no guard
+// is required.
+WgcCapturer::~WgcCapturer()
+{
+    delete _app;
+    _app = nullptr;
+}
+
+// Begins capturing the given monitor at the requested output size; forwards
+// to the owned App. Returns false if the capture could not be started.
+bool WgcCapturer::StartCapturerMonitor(HMONITOR monitor, int width, int height)
+{
+    return _app->StartCaptureMonitor(monitor, width, height);
+}
+
+// Begins capturing the given window at the requested output size; forwards
+// to the owned App. Returns false if the capture could not be started.
+bool WgcCapturer::StartCapturerWindow(HWND hwnd, int width, int height)
+{
+    return _app->StartCaptureWindow(hwnd, width, height);
+}

+ 35 - 0
AvRecorder/test/capturer/wgc_capturer.h

@@ -0,0 +1,35 @@
+#ifndef __WGC_CAPTURER_H__
+#define __WGC_CAPTURER_H__
+
+#include "wgc/pch.h"
+
+#include "wgc/App.h"
+#include <list>
+
+// Pooled Windows.Graphics.Capture based screen/window capturer.
+// Lifecycle: Init() once on the UI thread -> New() to obtain an instance ->
+// Uninit() destroys everything. Construction/destruction are private; the
+// pool (static _capturers) owns all instances.
+class WgcCapturer {
+public:
+    bool StartCapturerWindow(HWND hwnd, int width, int height);
+    bool StartCapturerMonitor(HMONITOR monitor, int width, int height);
+    // Forwards cursor-drawing preference to the capture session.
+    void SetDrawCursor(bool isDrawCursor) { _app->SetDrawCursor(isDrawCursor); }
+    static void Init();
+    static WgcCapturer* New();
+    static void Delete(WgcCapturer* ptr);
+    static void Uninit();
+    // Stops the underlying capture; safe to call repeatedly.
+    void Close()
+    {
+        if (_app != nullptr) {
+            _app->Close();
+        }
+    }
+    // Latest captured frame, owned by the App (do not free).
+    AVFrame* GetFrame() { return _app->GetFrame(); }
+
+private:
+    WgcCapturer();
+    ~WgcCapturer();
+    App* _app = nullptr;
+    // Set asynchronously once the App finished Initialize() on the dispatcher.
+    bool _isAppInit = false;
+    static std::list<WgcCapturer*> _capturers;
+    // Point at function-local statics created in Init(); valid after Init().
+    static winrt::Windows::System::DispatcherQueue* queuePtr;
+    static winrt::Windows::UI::Composition::ContainerVisual* rootPtr;
+};
+#endif

+ 60 - 0
AvRecorder/test/d3d/buffer_filler.cpp

@@ -0,0 +1,60 @@
+#include "buffer_filler.h"
+#include "basic/basic.h"
+
+// Prepares a pool of CPU-readable staging textures (used to copy captured GPU
+// frames back to system memory). `desc` is taken by value and overwritten
+// with the staging properties below. Returns false if no buffer exists.
+bool BufferFiller::Fill(ID3D11Device* device, D3D11_TEXTURE2D_DESC desc, int maxCnt)
+{
+    // Fix: with maxCnt <= 0 the old code entered the "replace" branch with an
+    // empty vector and indexed it (undefined behavior).
+    if (maxCnt <= 0) {
+        return false;
+    }
+    const size_t maxCount = static_cast<size_t>(maxCnt); // Fix: avoid signed/unsigned comparisons.
+
+    // Properties shared by every staging buffer.
+    desc.ArraySize = 1;
+    desc.BindFlags = 0;
+    desc.MiscFlags = 0;
+    desc.SampleDesc.Count = 1;
+    desc.SampleDesc.Quality = 0;
+    desc.MipLevels = 1;
+    desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
+    desc.Usage = D3D11_USAGE_STAGING;
+
+    // Pool at capacity: replace the slot that will be mapped next (it may
+    // have been released by Reset(), or the frame size may have changed).
+    if (_buffers.size() == maxCount) {
+        ID3D11Texture2D* dstImg = nullptr;
+        if (FAILED(device->CreateTexture2D(&desc, nullptr, &dstImg))) {
+            return false;
+        }
+        if (_buffers[_mapIdx]) {
+            _buffers[_mapIdx]->Release();
+        }
+        _buffers[_mapIdx] = dstImg;
+        _mapIdx = (_mapIdx + 1) % _buffers.size();
+        return true;
+    }
+
+    // Otherwise grow the pool until it reaches the requested size.
+    while (_buffers.size() < maxCount) {
+        ID3D11Texture2D* dstImg = nullptr;
+        if (FAILED(device->CreateTexture2D(&desc, nullptr, &dstImg))) {
+            break;
+        }
+        _buffers.push_back(dstImg);
+    }
+
+    __CheckBool(!_buffers.empty());
+    _copyIdx = 0;
+    _mapIdx = (_copyIdx + 1) % _buffers.size();
+    return true;
+}
+
+// Releases the texture that was just mapped and advances the copy slot; the
+// next Fill() call recreates the released slot (its replace branch tolerates
+// the null entry).
+// NOTE(review): assumes Fill() previously succeeded — _buffers must be
+// non-empty and _buffers[_mapIdx] non-null here; confirm callers guarantee it.
+bool BufferFiller::Reset()
+{
+    _buffers[_mapIdx]->Release();
+    _buffers[_mapIdx] = nullptr;
+    _copyIdx = (_copyIdx + 1) % _buffers.size();
+    return true;
+}
+
+// Releases every staging texture and empties the pool. `Free` is the
+// project-wide null-guarded release helper from basic/basic.h.
+void BufferFiller::Clear()
+{
+    for (auto&& dstImg : _buffers) {
+        Free(dstImg, [&dstImg] { dstImg->Release(); });
+    }
+    _buffers.clear();
+}

+ 24 - 0
AvRecorder/test/d3d/buffer_filler.h

@@ -0,0 +1,24 @@
+#ifndef __BUFFER_FILLER_H__
+#define __BUFFER_FILLER_H__
+#include <d3d11.h>
+#include <vector>
+
+// Small rotating pool of D3D11 staging textures used to read GPU frames back
+// to the CPU: CopyResource() into GetCopy(), Map() GetMap(), then Reset().
+class BufferFiller {
+public:
+    // (Re)creates buffers matching `desc`; see Fill() in the .cpp for the
+    // rotation details. maxCnt is the pool size.
+    bool Fill(ID3D11Device* device, D3D11_TEXTURE2D_DESC desc, int maxCnt = 3);
+    // Releases the just-mapped buffer and rotates the indices.
+    bool Reset();
+    // Texture to copy the captured frame INTO (GPU side).
+    ID3D11Texture2D* GetCopy() { return _buffers[_copyIdx]; }
+    // Texture to Map() for CPU reads (filled on an earlier iteration).
+    ID3D11Texture2D* GetMap() { return _buffers[_mapIdx]; }
+    void Clear();
+    ~BufferFiller()
+    {
+        Clear();
+    }
+
+private:
+    int _mapIdx = 0;
+    int _copyIdx = 0;
+    std::vector<ID3D11Texture2D*> _buffers;
+};
+
+#endif

+ 131 - 0
AvRecorder/test/d3d/convert.cpp

@@ -0,0 +1,131 @@
+
+#include "convert.h"
+using namespace std;
+
+#if !defined(SAFE_RELEASE)
+#define SAFE_RELEASE(X) \
+    if (X) {            \
+        X->Release();   \
+        X = nullptr;    \
+    }
+#endif
+
+#if !defined(PRINTERR1)
+#define PRINTERR1(x) printf(__FUNCTION__ ": Error 0x%08x at line %d in file %s\n", x, __LINE__, __FILE__);
+#endif
+
+#if !defined(PRINTERR)
+#define PRINTERR(x, y) printf(__FUNCTION__ ": Error 0x%08x in %s at line %d in file %s\n", x, y, __LINE__, __FILE__);
+#endif
+
+/// Initialize Video Context: keeps references to the device/context and
+/// obtains their video interfaces. Returns the failing HRESULT immediately on
+/// error; Close() releases everything acquired here.
+HRESULT D3dConverter::Open(ID3D11Device* pDev, ID3D11DeviceContext* pCtx,
+    const D3D11_VIDEO_PROCESSOR_COLOR_SPACE& inColorSpace, D3D11_VIDEO_PROCESSOR_COLOR_SPACE& outColorSpace)
+{
+    m_pDev = pDev;
+    m_pCtx = pCtx;
+    m_pDev->AddRef();
+    m_pCtx->AddRef();
+    /// Obtain Video device and Video device context
+    HRESULT hr = m_pDev->QueryInterface(__uuidof(ID3D11VideoDevice), (void**)&m_pVid);
+    if (FAILED(hr)) {
+        PRINTERR(hr, "QAI for ID3D11VideoDevice");
+        // Fix: previously fell through, so this failure was masked by the
+        // HRESULT of the second QueryInterface below.
+        return hr;
+    }
+    hr = m_pCtx->QueryInterface(__uuidof(ID3D11VideoContext), (void**)&m_pVidCtx);
+    if (FAILED(hr)) {
+        PRINTERR(hr, "QAI for ID3D11VideoContext");
+        return hr;
+    }
+    _inColorSpace = inColorSpace;
+    _outColorSpace = outColorSpace;
+    return hr;
+}
+
+/// Release all Resources. Safe to call more than once (the destructor also
+/// calls it).
+void D3dConverter::Close()
+{
+    for (auto& it : viewMap) {
+        ID3D11VideoProcessorOutputView* pVPOV = it.second;
+        pVPOV->Release();
+    }
+    // Fix: without clearing the map, a second Close() (e.g. explicit Close
+    // followed by the destructor) released the cached views twice.
+    viewMap.clear();
+    SAFE_RELEASE(m_pVP);
+    SAFE_RELEASE(m_pVPEnum);
+    SAFE_RELEASE(m_pVidCtx);
+    SAFE_RELEASE(m_pVid);
+    SAFE_RELEASE(m_pCtx);
+    SAFE_RELEASE(m_pDev);
+}
+
+/// Perform Colorspace conversion
+/// Converts pIn into pOut with the DXVA video processor, (re)creating the
+/// processor whenever the input/output dimensions change. Open() must have
+/// succeeded first.
+HRESULT D3dConverter::Convert(ID3D11Texture2D* pIn, ID3D11Texture2D* pOut)
+{
+    HRESULT hr = S_OK;
+
+    D3D11_TEXTURE2D_DESC inDesc = {0};
+    D3D11_TEXTURE2D_DESC outDesc = {0};
+    pIn->GetDesc(&inDesc);
+    pOut->GetDesc(&outDesc);
+
+    /// Check if VideoProcessor needs to be reconfigured
+    /// Reconfiguration is required if input/output dimensions have changed
+    if (m_pVP) {
+        if (m_inDesc.Width != inDesc.Width || m_inDesc.Height != inDesc.Height || m_outDesc.Width != outDesc.Width || m_outDesc.Height != outDesc.Height) {
+            SAFE_RELEASE(m_pVPEnum);
+            SAFE_RELEASE(m_pVP);
+        }
+    }
+
+    if (!m_pVP) {
+        /// Initialize Video Processor
+        m_inDesc = inDesc;
+        m_outDesc = outDesc;
+        D3D11_VIDEO_PROCESSOR_CONTENT_DESC contentDesc = {
+            D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE,
+            {0, 0}, inDesc.Width, inDesc.Height,
+            {0, 0}, outDesc.Width, outDesc.Height,
+            D3D11_VIDEO_USAGE_PLAYBACK_NORMAL};
+        hr = m_pVid->CreateVideoProcessorEnumerator(&contentDesc, &m_pVPEnum);
+        if (FAILED(hr)) {
+            PRINTERR(hr, "CreateVideoProcessorEnumerator");
+        }
+        // NOTE(review): execution continues even if the enumerator failed, so
+        // CreateVideoProcessor would receive a null enumerator — confirm
+        // whether an early return is intended here.
+        hr = m_pVid->CreateVideoProcessor(m_pVPEnum, 0, &m_pVP);
+        if (FAILED(hr)) {
+            PRINTERR(hr, "CreateVideoProcessor");
+        }
+
+        // Color spaces were captured in Open(); applied once per processor.
+        m_pVidCtx->VideoProcessorSetStreamColorSpace(m_pVP, 0, &_inColorSpace);
+        m_pVidCtx->VideoProcessorSetOutputColorSpace(m_pVP, &_outColorSpace);
+    }
+
+    /// Obtain Video Processor Input view from input texture
+    ID3D11VideoProcessorInputView* pVPIn = nullptr;
+    D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC inputVD = {0, D3D11_VPIV_DIMENSION_TEXTURE2D, {0, 0}};
+    hr = m_pVid->CreateVideoProcessorInputView(pIn, m_pVPEnum, &inputVD, &pVPIn);
+    if (FAILED(hr)) {
+        PRINTERR(hr, "CreateVideoProcessInputView");
+        return hr;
+    }
+
+    /// Obtain Video Processor Output view from output texture
+    /// NOTE(review): viewMap was meant to cache these views (see Close()),
+    /// but it is never populated here — the view is created and released on
+    /// every call.
+    ID3D11VideoProcessorOutputView* pVPOV = nullptr;
+    D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC ovD = {D3D11_VPOV_DIMENSION_TEXTURE2D};
+    hr = m_pVid->CreateVideoProcessorOutputView(pOut, m_pVPEnum, &ovD, &pVPOV);
+    if (FAILED(hr)) {
+        SAFE_RELEASE(pVPIn);
+        PRINTERR(hr, "CreateVideoProcessorOutputView");
+        return hr;
+    }
+
+    /// Create a Video Processor Stream to run the operation
+    D3D11_VIDEO_PROCESSOR_STREAM stream = {TRUE, 0, 0, 0, 0, nullptr, pVPIn, nullptr};
+
+    /// Perform the Colorspace conversion
+    hr = m_pVidCtx->VideoProcessorBlt(m_pVP, pVPOV, 0, 1, &stream);
+    if (FAILED(hr)) {
+        SAFE_RELEASE(pVPIn);
+        PRINTERR(hr, "VideoProcessorBlt");
+        return hr;
+    }
+    SAFE_RELEASE(pVPIn);
+    SAFE_RELEASE(pVPOV);
+    return hr;
+}

+ 80 - 0
AvRecorder/test/d3d/convert.h

@@ -0,0 +1,80 @@
+/*
+ * Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *  * Neither the name of NVIDIA CORPORATION nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+#include <dxgi1_2.h>
+#include <d3d11_2.h>
+#include <unordered_map>
+
+class D3dConverter {
+    /// Simple Preprocessor class
+    /// Uses DXVAHD VideoProcessBlt to perform colorspace conversion
+    /// Typical use: Open() once, Convert() per frame, Close() (also run by
+    /// the destructor).
+private:
+    /// D3D11 device to be used for Processing
+    ID3D11Device* m_pDev = nullptr;
+    /// D3D11 device context to be used for Processing
+    ID3D11DeviceContext* m_pCtx = nullptr;
+    /// D3D11 video device to be used for Processing, obtained from d3d11 device
+    ID3D11VideoDevice* m_pVid = nullptr;
+    /// D3D11 video device context to be used for Processing, obtained from d3d11 device
+    ID3D11VideoContext* m_pVidCtx = nullptr;
+    /// DXVAHD video processor configured for processing.
+    /// Needs to be reconfigured based on input and output textures for each Convert() call
+    ID3D11VideoProcessor* m_pVP = nullptr;
+    /// DXVAHD VpBlt output target. Obtained from the output texture passed to Convert()
+    /// NOTE(review): currently unused — Convert() creates a local view instead.
+    ID3D11VideoProcessorOutputView* m_pVPOut = nullptr;
+    /// D3D11 video processor enumerator. Required to configure Video processor streams
+    ID3D11VideoProcessorEnumerator* m_pVPEnum = nullptr;
+    /// Mapping of Texture2D handle and corresponding Video Processor output view handle
+    /// Optimization to avoid having to create video processor output views in each Convert() call
+    /// NOTE(review): never populated by Convert(); only drained in Close().
+    std::unordered_map<ID3D11Texture2D*, ID3D11VideoProcessorOutputView*> viewMap;
+    /// Input and Output Texture2D properties.
+    /// Required to optimize Video Processor stream usage
+    D3D11_TEXTURE2D_DESC m_inDesc = {0};
+    D3D11_TEXTURE2D_DESC m_outDesc = {0};
+    D3D11_VIDEO_PROCESSOR_COLOR_SPACE _inColorSpace;
+    D3D11_VIDEO_PROCESSOR_COLOR_SPACE _outColorSpace;
+
+public:
+    /// Initialize Video Context
+    HRESULT Open(ID3D11Device* pDev, ID3D11DeviceContext* pCtx,
+        const D3D11_VIDEO_PROCESSOR_COLOR_SPACE& inColorSpace, D3D11_VIDEO_PROCESSOR_COLOR_SPACE& outColorSpace);
+    /// Perform Colorspace conversion
+    HRESULT Convert(ID3D11Texture2D* pIn, ID3D11Texture2D* pOut);
+    /// Release all resources
+    void Close();
+
+public:
+    /// Constructor
+    // RGBToNV12(ID3D11Device *pDev, ID3D11DeviceContext *pCtx);
+    /// Destructor. Release all resources before destroying object
+    ~D3dConverter()
+    {
+        Close();
+    }
+};

+ 81 - 0
AvRecorder/test/d3d/gen_frame.cpp

@@ -0,0 +1,81 @@
+#include "gen_frame.h"
+
+#include <winrt/base.h>
+
+#undef min
+#undef max
+
+// Converts a BGRA capture texture to NV12 on the GPU, copies it through the
+// staging pool and writes the Y and interleaved-UV planes into outFrame.
+// Writes to outFrame are serialized with the project-global __mtx —
+// presumably guarding concurrent readers of the frame; TODO confirm.
+bool GenNv12Frame(ID3D11Device* device,
+                  ID3D11DeviceContext* ctx,
+                  const D3D11_TEXTURE2D_DESC& desc,
+                  ID3D11Texture2D* img,
+                  BufferFiller& buffers,
+                  AVFrame*& outFrame,
+                  D3dConverter& rgbToNv12)
+{
+    winrt::com_ptr<ID3D11Texture2D> nv12Img = nullptr;
+    if (FAILED(device->CreateTexture2D(&desc, nullptr, nv12Img.put()))) {
+        return false;
+    }
+    __CheckBool(SUCCEEDED(rgbToNv12.Convert(img, nv12Img.get())));
+    // (Re)fill the staging-buffer pool to match the current description.
+    __CheckBool(buffers.Fill(device, desc));
+
+    ctx->CopyResource(buffers.GetCopy(), nv12Img.get());
+    D3D11_MAPPED_SUBRESOURCE resource;
+    __CheckBool(SUCCEEDED(ctx->Map(buffers.GetMap(), 0, D3D11_MAP_READ, 0, &resource)));
+    auto height = std::min(outFrame->height, (int)desc.Height);
+    auto width = outFrame->width;
+    auto srcLinesize = resource.RowPitch;
+    auto dstLinesize = outFrame->linesize[0];
+    auto srcData = (uint8_t*)resource.pData;
+    // Extra rows/columns in the source beyond the output size — assumed to be
+    // a window title bar (top) and symmetric side borders; TODO confirm the
+    // offset math against the capture path that sizes `desc`.
+    auto titleHeight = std::max(int(desc.Height - height), 0);
+    /* auto copyLine = std::min(std::min(width, (int) srcLinesize), dstLinesize);*/
+    auto border = (desc.Width - width) / 2;
+    __mtx.lock();
+
+    // Y plane: one byte per pixel, row by row.
+    int Ystart = (titleHeight - border) * srcLinesize + border;
+    auto dstData = outFrame->data[0];
+    for (int row = 0; row < height; ++row) {
+        memcpy(dstData + row * dstLinesize, srcData + Ystart + row * srcLinesize, width);
+    }
+
+    // UV plane: interleaved, half the rows; starts after the full-height Y
+    // area of the source texture.
+    dstData = outFrame->data[1];
+    int UVStart = srcLinesize * desc.Height + (titleHeight - border) / 2 * srcLinesize + border / 2 * 2;
+    for (int row = 0; row < height / 2; ++row) {
+        memcpy(dstData + row * dstLinesize, srcData + UVStart + row * srcLinesize, width);
+    }
+
+    __mtx.unlock();
+    ctx->Unmap(buffers.GetMap(), 0);
+    __CheckBool(buffers.Reset());
+    return true;
+}
+// Copies a BGRA capture texture through the staging pool into outFrame's
+// single packed plane (4 bytes per pixel), cropping an assumed title-bar
+// region at the top and symmetric side borders — TODO confirm against the
+// capture path. Writes are serialized with the project-global __mtx.
+bool GenRgbFrame(ID3D11Device* device, ID3D11DeviceContext* ctx, const D3D11_TEXTURE2D_DESC& desc,
+    ID3D11Texture2D* img, BufferFiller& buffers, AVFrame*& outFrame)
+{
+    __CheckBool(buffers.Fill(device, desc));
+    ctx->CopyResource(buffers.GetCopy(), img);
+    D3D11_MAPPED_SUBRESOURCE resource;
+    __CheckBool(SUCCEEDED(ctx->Map(buffers.GetMap(), 0, D3D11_MAP_READ, 0, &resource)));
+    auto height = std::min(outFrame->height, (int)desc.Height);
+    auto width = outFrame->width;
+    auto srcLinesize = resource.RowPitch;
+    auto dstLinesize = outFrame->linesize[0];
+    auto srcData = (uint8_t*)resource.pData;
+    auto dstData = outFrame->data[0];
+    auto titleHeight = std::max(int(desc.Height - height), 0);
+    // Clamp the copy width to both pitches; * 4 because of 4 bytes per pixel.
+    auto copyLine = std::min(std::min(width * 4, (int)srcLinesize), dstLinesize);
+    auto border = (desc.Width - width) / 2;
+    __mtx.lock();
+    for (int row = 0; row < height; ++row) {
+        auto offset = (titleHeight + row - border) * srcLinesize + border * 4;
+        memcpy(dstData + row * dstLinesize, srcData + offset, copyLine);
+    }
+    __mtx.unlock();
+    ctx->Unmap(buffers.GetMap(), 0);
+    __CheckBool(buffers.Reset());
+    return true;
+}

Một số tệp đã không được hiển thị bởi vì quá nhiều tập tin thay đổi trong này khác