فهرست منبع

移除不必要的文件

zhuizhu 9 ماه پیش
والد
کامیت
e6b4cc0f81
61 فایل‌های تغییر یافته به همراه 0 افزوده شده و 5039 حذف شده
  1. 0 4
      AvRecorder/test/basic/basic.cpp
  2. 0 85
      AvRecorder/test/basic/basic.h
  3. 0 102
      AvRecorder/test/basic/frame.cpp
  4. 0 78
      AvRecorder/test/basic/frame.h
  5. 0 88
      AvRecorder/test/basic/timer.h
  6. 0 182
      AvRecorder/test/capturer/audio_capturer.cpp
  7. 0 53
      AvRecorder/test/capturer/audio_capturer.h
  8. 0 226
      AvRecorder/test/capturer/dxgi_capturer.cpp
  9. 0 39
      AvRecorder/test/capturer/dxgi_capturer.h
  10. 0 98
      AvRecorder/test/capturer/finder.cpp
  11. 0 43
      AvRecorder/test/capturer/finder.h
  12. 0 56
      AvRecorder/test/capturer/gdi_capturer.cpp
  13. 0 26
      AvRecorder/test/capturer/gdi_capturer.h
  14. 0 145
      AvRecorder/test/capturer/video_capturer.cpp
  15. 0 46
      AvRecorder/test/capturer/video_capturer.h
  16. 0 88
      AvRecorder/test/capturer/wgc/App.cpp
  17. 0 46
      AvRecorder/test/capturer/wgc/App.h
  18. 0 175
      AvRecorder/test/capturer/wgc/SimpleCapture.cpp
  19. 0 62
      AvRecorder/test/capturer/wgc/SimpleCapture.h
  20. 0 24
      AvRecorder/test/capturer/wgc/capture.interop.h
  21. 0 61
      AvRecorder/test/capturer/wgc/composition.interop.h
  22. 0 173
      AvRecorder/test/capturer/wgc/d3dHelpers.h
  23. 0 40
      AvRecorder/test/capturer/wgc/direct3d11.interop.h
  24. 0 9
      AvRecorder/test/capturer/wgc/pch.cpp
  25. 0 34
      AvRecorder/test/capturer/wgc/pch.h
  26. 0 32
      AvRecorder/test/capturer/wgc/winrt.cpp
  27. 0 24
      AvRecorder/test/capturer/wgc/winrt.h
  28. 0 96
      AvRecorder/test/capturer/wgc_capturer.cpp
  29. 0 35
      AvRecorder/test/capturer/wgc_capturer.h
  30. 0 60
      AvRecorder/test/d3d/buffer_filler.cpp
  31. 0 24
      AvRecorder/test/d3d/buffer_filler.h
  32. 0 131
      AvRecorder/test/d3d/convert.cpp
  33. 0 80
      AvRecorder/test/d3d/convert.h
  34. 0 81
      AvRecorder/test/d3d/gen_frame.cpp
  35. 0 13
      AvRecorder/test/d3d/gen_frame.h
  36. 0 14
      AvRecorder/test/encoder/abstract_encoder.cpp
  37. 0 31
      AvRecorder/test/encoder/abstract_encoder.h
  38. 0 52
      AvRecorder/test/encoder/audio_encoder.cpp
  39. 0 24
      AvRecorder/test/encoder/audio_encoder.h
  40. 0 329
      AvRecorder/test/encoder/audio_mixer.cpp
  41. 0 100
      AvRecorder/test/encoder/audio_mixer.h
  42. 0 194
      AvRecorder/test/encoder/video_encoder.cpp
  43. 0 47
      AvRecorder/test/encoder/video_encoder.h
  44. 0 150
      AvRecorder/test/muxer/av_muxer.cpp
  45. 0 43
      AvRecorder/test/muxer/av_muxer.h
  46. 0 105
      AvRecorder/test/recorder/audio_recorder.cpp
  47. 0 49
      AvRecorder/test/recorder/audio_recorder.h
  48. 0 109
      AvRecorder/test/recorder/video_recorder.cpp
  49. 0 44
      AvRecorder/test/recorder/video_recorder.h
  50. 0 37
      AvRecorder/test/ui/audio_render.cpp
  51. 0 17
      AvRecorder/test/ui/audio_render.h
  52. 0 49
      AvRecorder/test/ui/audio_widget.cpp
  53. 0 37
      AvRecorder/test/ui/audio_widget.h
  54. 0 386
      AvRecorder/test/ui/av_recorder.cpp
  55. 0 67
      AvRecorder/test/ui/av_recorder.h
  56. 0 341
      AvRecorder/test/ui/opengl_video_widget.cpp
  57. 0 59
      AvRecorder/test/ui/opengl_video_widget.h
  58. 0 146
      AvRecorder/test/ui/settings_page.cpp
  59. 0 50
      AvRecorder/test/ui/settings_page.h
  60. BIN
      bin/2025-07-12-20-11-39.mp4
  61. BIN
      bin/2025-07-12-20-11-58.mp4

+ 0 - 4
AvRecorder/test/basic/basic.cpp

@@ -1,4 +0,0 @@
-
-#include "basic/basic.h"
-
-std::mutex __mtx;

+ 0 - 85
AvRecorder/test/basic/basic.h

@@ -1,85 +0,0 @@
-#ifndef __BASIC_FUCN_H__
-#define __BASIC_FUCN_H__
-#define __STDC_FORMAT_MACROS
-
-#include <functional>
-#include <mutex>
-#include <thread>
-
-extern "C" {
-#include <libavcodec/avcodec.h>
-#include <libavformat/avformat.h>
-}
-
-// ***************
-// MUTEX
-extern std::mutex __mtx;
-
-// ***************
-// debug function
-
-#define __AVDEBUG
-
-#ifdef __AVDEBUG
-#define __DebugPrint(fmtStr, ...) \
-    std::printf("[" __FILE__ ", line:%d] " fmtStr "\n", __LINE__, ##__VA_ARGS__)
-#define __Str(exp) #exp
-#define __Check(retVal, ...)                            \
-    do {                                                \
-        if (!(__VA_ARGS__)) {                           \
-            __DebugPrint(__Str(__VA_ARGS__) " failed"); \
-            return retVal;                              \
-        }                                               \
-    } while (false)
-
-#else
-#define __DebugPrint(fmtStr, ...)
-#define __Check(retVal, ...)  \
-    do {                      \
-        if (!(__VA_ARGS__)) { \
-            return retVal;    \
-        }                     \
-    } while (false)
-#endif
-
-#define __CheckNo(...) __Check(, __VA_ARGS__)
-#define __CheckBool(...) __Check(false, __VA_ARGS__)
-#define __CheckNullptr(...) __Check(nullptr, __VA_ARGS__)
-
-enum class MediaType {
-    AUDIO,
-    VIDEO
-};
-
-// ***************
-// memory function
-
-template <typename T, typename Func>
-void Free(T*& ptr, Func&& func)
-{
-    static_assert(std::is_convertible_v<Func, std::function<void()>>, "Type Func should be std::function<void()>");
-    if (ptr == nullptr) {
-        return;
-    }
-
-    func();
-    ptr = nullptr;
-}
-
-//***************
-// time function
-
-// Sleep x ms
-inline void SleepMs(int timeMs)
-{
-    std::this_thread::sleep_for(std::chrono::milliseconds(timeMs));
-}
-
-// 对于音频编码器的全局设置
-constexpr int AUDIO_SAMPLE_RATE = 48000;
-constexpr int AUDIO_CHANNEL = 1;
-constexpr AVSampleFormat AUDIO_FMT = AV_SAMPLE_FMT_FLTP;
-constexpr int MICROPHONE_INDEX = 0;
-constexpr int SPEAKER_INDEX = 1;
-
-#endif

+ 0 - 102
AvRecorder/test/basic/frame.cpp

@@ -1,102 +0,0 @@
-
-#include "basic/frame.h"
-
-extern "C" {
-#include <libswscale/swscale.h>
-}
-
-AVFrame* Frame<MediaType::AUDIO>::Alloc(AVSampleFormat sampleFmt,
-    const AVChannelLayout* channel_layout,
-    int sampleRate, int nbSamples)
-{
-    AVFrame* frame = nullptr;
-    __CheckNullptr(frame = av_frame_alloc());
-    frame->format = sampleFmt;
-    av_channel_layout_copy(&frame->ch_layout, channel_layout);
-    frame->sample_rate = sampleRate;
-    frame->nb_samples = nbSamples;
-
-    /* allocate the buffers for the frame data */
-    __CheckNullptr(av_frame_get_buffer(frame, 0) >= 0);
-    return frame;
-}
-
-Frame<MediaType::AUDIO>::Frame(AVSampleFormat sampleFmt,
-    const AVChannelLayout* channel_layout, int sampleRate,
-    int nbSamples)
-{
-    __CheckNo(frame = Alloc(sampleFmt, channel_layout, sampleRate, nbSamples));
-}
-
-Frame<MediaType::AUDIO>::Frame(AVFrame* frame)
-{
-    if (frame == nullptr) {
-        this->frame = nullptr;
-        return;
-    }
-    __CheckNo(this->frame = Alloc(AVSampleFormat(frame->format), &frame->ch_layout, frame->sample_rate, frame->nb_samples));
-    __CheckNo(av_frame_copy(this->frame, frame) >= 0);
-}
-
-Frame<MediaType::VIDEO>::Frame(AVPixelFormat pixFmt, int width, int height)
-{
-    __CheckNo(frame = Alloc(pixFmt, width, height));
-}
-
-AVFrame* Frame<MediaType::VIDEO>::Alloc(AVPixelFormat pixFmt, int width, int height)
-{
-    AVFrame* frame = nullptr;
-    __CheckNullptr(frame = av_frame_alloc());
-
-    frame->format = pixFmt;
-    frame->width = width;
-    frame->height = height;
-
-    /* allocate the buffers for the frame data */
-    __CheckNullptr(av_frame_get_buffer(frame, 0) >= 0);
-    return frame;
-}
-
-Frame<MediaType::VIDEO>::Frame(AVFrame* frame)
-{
-    if (frame == nullptr) {
-        this->frame = nullptr;
-        return;
-    }
-    __CheckNo(this->frame = Alloc(AVPixelFormat(frame->format), frame->width, frame->height));
-    __CheckNo(av_frame_copy(this->frame, frame) >= 0);
-}
-
-bool FfmpegConverter::SetSize(int width, int height)
-{
-    Free(_swsCtx, [this] { sws_freeContext(_swsCtx); });
-    Free(_frameTo, [this] { av_frame_free(&_frameTo); });
-    // 创建格式转换
-    __CheckBool(_swsCtx = sws_getContext(
-                    width, height, _from,
-                    width, height, _to,
-                    0, NULL, NULL, NULL));
-
-    __CheckBool(_frameTo = Frame<MediaType::VIDEO>::Alloc(_to, width, height));
-    return true;
-}
-
-AVFrame* FfmpegConverter::Trans(AVFrame* frameFrom)
-{
-    // 如果是空指针,直接把缓存返回
-    if (frameFrom == nullptr) {
-        return _frameTo;
-    }
-    __CheckNullptr(
-        sws_scale(_swsCtx, (const uint8_t* const*)frameFrom->data,
-            frameFrom->linesize, 0, frameFrom->height, _frameTo->data,
-            _frameTo->linesize)
-        >= 0);
-    return _frameTo;
-}
-
-FfmpegConverter::~FfmpegConverter()
-{
-    Free(_swsCtx, [this] { sws_freeContext(_swsCtx); });
-    Free(_frameTo, [this] { av_frame_free(&_frameTo); });
-}

+ 0 - 78
AvRecorder/test/basic/frame.h

@@ -1,78 +0,0 @@
-#ifndef __FRAME_H__
-#define __FRAME_H__
-#include "basic/basic.h"
-
-class __BasicFrame {
-public:
-    AVFrame* frame = nullptr;
-    __BasicFrame() = default;
-    __BasicFrame(__BasicFrame&& rhs) noexcept
-    {
-        frame = rhs.frame;
-        rhs.frame = nullptr;
-    }
-    __BasicFrame& operator=(__BasicFrame&& rhs)
-    {
-        Free(frame, [this] { av_frame_free(&frame); });
-        frame = rhs.frame;
-        rhs.frame = nullptr;
-        return *this;
-    }
-    __BasicFrame(const __BasicFrame& rhs) = delete;
-    __BasicFrame& operator=(const __BasicFrame& rhs) = delete;
-    ~__BasicFrame()
-    {
-        Free(frame, [this] { av_frame_free(&frame); });
-    }
-};
-
-template <MediaType mediaType>
-class Frame;
-
-template <>
-class Frame<MediaType::AUDIO> : public __BasicFrame {
-public:
-    static AVFrame* Alloc(AVSampleFormat sampleFmt,
-        const AVChannelLayout* channel_layout,
-        int sampleRate, int nbSamples);
-
-    Frame(AVSampleFormat sampleFmt,
-        const AVChannelLayout* channel_layout, int sampleRate,
-        int nbSamples);
-
-    Frame(AVFrame* frame);
-    Frame() = default;
-};
-
-template <>
-class Frame<MediaType::VIDEO> : public __BasicFrame {
-public:
-    static AVFrame* Alloc(AVPixelFormat pixFmt, int width, int height);
-    Frame(AVPixelFormat pixFmt, int width, int height);
-    Frame(AVFrame* frame);
-    Frame() = default;
-};
-
-struct SwsContext;
-
-class FfmpegConverter {
-private:
-    AVPixelFormat _from;
-    AVPixelFormat _to;
-
-public:
-    FfmpegConverter(AVPixelFormat from, AVPixelFormat to)
-        : _from(from)
-        , _to(to)
-    {
-    }
-    bool SetSize(int width, int height);
-    AVFrame* Trans(AVFrame* frameFrom);
-    ~FfmpegConverter();
-
-private:
-    AVFrame* _frameTo = nullptr;
-    SwsContext* _swsCtx = nullptr;
-};
-
-#endif

+ 0 - 88
AvRecorder/test/basic/timer.h

@@ -1,88 +0,0 @@
-#ifndef __TIMER_H__
-#define __TIMER_H__
-
-#include "basic/basic.h"
-
-#include <functional>
-
-class Timer
-{
-public:
-    ~Timer() { Stop(); }
-
-    // interval 为 0 表示时刻执行
-    template<typename Func>
-    void Start(int fps, Func&& func)
-    {
-        static_assert(std::is_convertible_v<Func, std::function<void()>>,
-                      "func need to be std::function<void()>");
-        _fps = fps;
-        _tickCnt = 0;
-        _isOverload = false;
-        __CheckNo(!_isRunning);
-        using namespace std::chrono;
-        _isRunning = true;
-        _beginTime = high_resolution_clock::now();
-        if (_fps > 0) {
-            auto task = [this, func = std::forward<Func>(func)]() mutable {
-                while (_isRunning) {
-                    // 这里不能直接使用整数除法
-                    // 因为整数除法有截断,导致最终睡眠的时间少一些
-                    uint64_t goalTime = int((double(1000.0) / _fps * _tickCnt) + 0.5);
-                    ++_tickCnt;
-                    auto nowTime = high_resolution_clock::now();
-                    auto duration = duration_cast<milliseconds>(nowTime - _beginTime).count();
-                    int64_t sleepTime = goalTime - duration;
-                    if (sleepTime > 0) {
-                        SleepMs(sleepTime);
-                    }
-#ifdef __AVDEBUG
-                    // else if (sleepTime < 0) {
-                    //     printf("Time out : %lld\n", -sleepTime);
-                    // }
-#endif
-                    _isOverload = -sleepTime > 1000; // 捕获的过载时间设置为 1s
-                    func();
-                }
-            };
-            _thread = new std::thread(std::move(task));
-            // timeBeginPeriod(1);
-            return;
-        }
-
-        auto task = [this, func = std::forward<Func>(func)]() mutable {
-            while (_isRunning) {
-                func();
-            }
-        };
-        _thread = new std::thread(std::move(task));
-    }
-
-    void Stop()
-    {
-        _isRunning = false;
-        if (_thread == nullptr) {
-            return;
-        }
-        // if (_fps > 0) {
-        //     timeEndPeriod(1);
-        // }
-        _thread->join();
-        delete _thread;
-
-        _thread = nullptr;
-    }
-
-    bool IsOverload() const { return _isOverload; }
-
-private:
-    int _fps = 100;
-    int _isRunning = false;
-    int _isOverload = false;
-    std::vector<int> vec;
-    std::chrono::time_point<std::chrono::high_resolution_clock> _beginTime;
-    std::thread* _thread = nullptr;
-    uint64_t _tickCnt = 0;
-};
-
-#endif

+ 0 - 182
AvRecorder/test/capturer/audio_capturer.cpp

@@ -1,182 +0,0 @@
-#include "audio_capturer.h"
-
-#include "basic/basic.h"
-
-#define DEFAULT_SAMPLE_RATE 48000        // 默认采样率:48kHz
-#define DEFAULT_BITS_PER_SAMPLE 16       // 默认位深:16bit
-#define DEFAULT_CHANNELS 1               // 默认音频通道数:1
-#define DEFAULT_AUDIO_PACKET_INTERVAL 10 // 默认音频包发送间隔:10ms
-
-bool AudioCapturer::Init(Type deviceType, CallBack callback, void* userInfo)
-{
-    Stop();
-    _userInfo = userInfo;
-    _callback = callback;
-    _deviceType = deviceType;
-    __CheckBool(_CreateDeviceEnumerator(&_pDeviceEnumerator));
-    __CheckBool(_CreateDevice(_pDeviceEnumerator, &_pDevice));
-    __CheckBool(_CreateAudioClient(_pDevice, &_pAudioClient));
-
-    if (!_IsFormatSupported(_pAudioClient)) {
-        __CheckBool(_GetPreferFormat(_pAudioClient, &_formatex));
-    }
-    __CheckBool(_InitAudioClient(_pAudioClient, &_formatex));
-    __CheckBool(_CreateAudioCaptureClient(_pAudioClient, &_pAudioCaptureClient));
-    _isInit = true;
-    return true;
-}
-
-bool AudioCapturer::Start()
-{
-    __CheckBool(_isInit);
-    _loopFlag = true;
-    // 用于强制打开扬声器
-    PlaySoundA("./rc/mute.wav", nullptr, SND_FILENAME | SND_ASYNC | SND_LOOP);
-    _captureThread = new std::thread(
-        [this] { _ThreadRun(_pAudioClient, _pAudioCaptureClient); });
-    return true;
-}
-
-void AudioCapturer::Stop()
-{
-    // CoUninitialize();
-    _isInit = false;
-    _loopFlag = false;
-    Free(_captureThread, [this] {
-        _captureThread->join();
-        delete _captureThread;
-    });
-    Free(_pAudioCaptureClient, [this] { _pAudioCaptureClient->Release(); });
-    if (_pAudioClient != nullptr) {
-        _pAudioClient->Stop();
-    }
-    PlaySoundA(nullptr, nullptr, SND_FILENAME | SND_ASYNC | SND_LOOP);
-
-    Free(_pAudioClient, [this] { _pAudioClient->Release(); });
-    Free(_pDevice, [this] { _pDevice->Release(); });
-    Free(_pDeviceEnumerator, [this] { _pDeviceEnumerator->Release(); });
-}
-
-bool AudioCapturer::_CreateDeviceEnumerator(IMMDeviceEnumerator** enumerator)
-{
-    // __CheckBool(SUCCEEDED(CoInitializeEx(nullptr, COINIT_MULTITHREADED)));
-    // __CheckBool(SUCCEEDED(CoInitializeEx(nullptr, COINIT_APARTMENTTHREADED)));
-    __CheckBool(SUCCEEDED(CoCreateInstance(__uuidof(MMDeviceEnumerator), NULL, CLSCTX_ALL,
-        __uuidof(IMMDeviceEnumerator),
-        reinterpret_cast<void**>(enumerator))));
-    return true;
-}
-bool AudioCapturer::_CreateDevice(IMMDeviceEnumerator* enumerator, IMMDevice** device)
-{
-    EDataFlow enDataFlow = _deviceType == Microphone ? eCapture : eRender;
-    ERole enRole = eConsole;
-    __CheckBool(SUCCEEDED(enumerator->GetDefaultAudioEndpoint(enDataFlow, enRole, device)));
-    return true;
-}
-bool AudioCapturer::_CreateAudioClient(IMMDevice* device, IAudioClient** audioClient)
-{
-    __CheckBool(SUCCEEDED(device->Activate(__uuidof(IAudioClient), CLSCTX_ALL, NULL,
-        (void**)audioClient)));
-    return true;
-}
-bool AudioCapturer::_IsFormatSupported(IAudioClient* audioClient)
-{
-    memset(&_formatex, 0, sizeof(_formatex));
-    WAVEFORMATEX* format = &_formatex.Format;
-    format->nSamplesPerSec = DEFAULT_SAMPLE_RATE;
-    format->wBitsPerSample = DEFAULT_BITS_PER_SAMPLE;
-    format->nChannels = DEFAULT_CHANNELS;
-
-    WAVEFORMATEX* closestMatch = nullptr;
-
-    HRESULT hr = audioClient->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED,
-        format, &closestMatch);
-    if (hr == AUDCLNT_E_UNSUPPORTED_FORMAT) // 0x88890008
-    {
-        if (closestMatch == nullptr) // 如果找不到最相近的格式,closestMatch可能为nullptr
-        {
-            return false;
-        }
-
-        format->nSamplesPerSec = closestMatch->nSamplesPerSec;
-        format->wBitsPerSample = closestMatch->wBitsPerSample;
-        format->nChannels = closestMatch->nChannels;
-
-        return true;
-    }
-
-    return false;
-}
-bool AudioCapturer::_GetPreferFormat(IAudioClient* audioClient,
-    WAVEFORMATEXTENSIBLE* formatex)
-{
-    WAVEFORMATEX* format = nullptr;
-    __CheckBool(SUCCEEDED(audioClient->GetMixFormat(&format)));
-    formatex->Format.nSamplesPerSec = format->nSamplesPerSec;
-    formatex->Format.wBitsPerSample = format->wBitsPerSample;
-    formatex->Format.nChannels = format->nChannels;
-    return true;
-}
-bool AudioCapturer::_InitAudioClient(IAudioClient* audioClient,
-    WAVEFORMATEXTENSIBLE* formatex)
-{
-    AUDCLNT_SHAREMODE shareMode = AUDCLNT_SHAREMODE_SHARED; // share Audio Engine with other applications
-    DWORD streamFlags = _deviceType == Microphone ? 0 : AUDCLNT_STREAMFLAGS_LOOPBACK;
-    streamFlags |= AUDCLNT_STREAMFLAGS_AUTOCONVERTPCM;      // A channel matrixer and a sample
-                                                            // rate converter are inserted
-    streamFlags |= AUDCLNT_STREAMFLAGS_SRC_DEFAULT_QUALITY; // a sample rate converter
-                                                            // with better quality than
-                                                            // the default conversion but
-                                                            // with a higher performance
-                                                            // cost is used
-    REFERENCE_TIME hnsBufferDuration = 0;
-    WAVEFORMATEX* format = &formatex->Format;
-    format->wFormatTag = WAVE_FORMAT_EXTENSIBLE;
-    format->nBlockAlign = (format->wBitsPerSample >> 3) * format->nChannels;
-    format->nAvgBytesPerSec = format->nBlockAlign * format->nSamplesPerSec;
-    format->cbSize = sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX);
-    formatex->Samples.wValidBitsPerSample = format->wBitsPerSample;
-    formatex->dwChannelMask = format->nChannels == 1 ? KSAUDIO_SPEAKER_MONO : KSAUDIO_SPEAKER_STEREO;
-    formatex->SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
-
-    __CheckBool(SUCCEEDED(audioClient->Initialize(shareMode, streamFlags, hnsBufferDuration, 0,
-        format, nullptr)));
-    return true;
-}
-
-bool AudioCapturer::_CreateAudioCaptureClient(IAudioClient* audioClient,
-    IAudioCaptureClient** audioCaptureClient)
-{
-    __CheckBool(SUCCEEDED(audioClient->GetService(IID_PPV_ARGS(audioCaptureClient))));
-    return true;
-}
-
-bool AudioCapturer::_ThreadRun(IAudioClient* audio_client,
-    IAudioCaptureClient* audio_capture_client)
-{
-    UINT32 num_success = 0;
-    BYTE* p_audio_data = nullptr;
-    UINT32 num_frames_to_read = 0;
-    DWORD dw_flag = 0;
-    UINT32 num_frames_in_next_packet = 0;
-    audio_client->Start();
-    while (_loopFlag) {
-        SleepMs(5);
-        while (true) {
-            __CheckBool(SUCCEEDED(audio_capture_client->GetNextPacketSize(&num_frames_in_next_packet)));
-            if (num_frames_in_next_packet == 0) {
-                break;
-            }
-
-            __CheckBool(SUCCEEDED(audio_capture_client->GetBuffer(&p_audio_data, &num_frames_to_read,
-                &dw_flag, nullptr, nullptr)));
-
-            size_t size = (_formatex.Format.wBitsPerSample >> 3) * _formatex.Format.nChannels * num_frames_to_read;
-            _callback(p_audio_data, size, _userInfo);
-            __CheckBool(SUCCEEDED(audio_capture_client->ReleaseBuffer(num_frames_to_read)));
-        }
-    }
-
-    audio_client->Stop();
-    return true;
-}

+ 0 - 53
AvRecorder/test/capturer/audio_capturer.h

@@ -1,53 +0,0 @@
-
-#ifndef __AUDIO_CAPTURER_H__
-#define __AUDIO_CAPTURER_H__
-
-#include <audioclient.h>
-#include <combaseapi.h>
-#include <mmdeviceapi.h>
-
-#include <memory>
-#include <thread>
-
-class AudioCapturer {
-public:
-    enum Type {
-        Microphone,
-        Speaker
-    };
-    using CallBack = void (*)(void* data, size_t size, void* userInfo);
-
-    bool Init(Type deviceType, CallBack callback, void* userInfo = nullptr);
-    bool Start();
-    const WAVEFORMATEX& GetFormat() const { return _formatex.Format; }
-
-    void Stop();
-
-private:
-    bool _isInit = false;
-    CallBack _callback;
-    Type _deviceType;
-    IMMDeviceEnumerator* _pDeviceEnumerator = nullptr;
-    IMMDevice* _pDevice = nullptr;
-    IAudioClient* _pAudioClient = nullptr;
-    IAudioCaptureClient* _pAudioCaptureClient = nullptr;
-    std::thread* _captureThread = nullptr;
-    bool _loopFlag = false;
-    WAVEFORMATEXTENSIBLE _formatex;
-    void* _userInfo = nullptr;
-
-    bool _CreateDeviceEnumerator(IMMDeviceEnumerator** enumerator);
-    bool _CreateDevice(IMMDeviceEnumerator* enumerator, IMMDevice** device);
-    bool _CreateAudioClient(IMMDevice* device, IAudioClient** audioClient);
-    bool _IsFormatSupported(IAudioClient* audioClient);
-    bool _GetPreferFormat(IAudioClient* audioClient,
-        WAVEFORMATEXTENSIBLE* formatex);
-    bool _InitAudioClient(IAudioClient* audioClient,
-        WAVEFORMATEXTENSIBLE* formatex);
-    bool _CreateAudioCaptureClient(IAudioClient* audioClient,
-        IAudioCaptureClient** audioCaptureClient);
-    bool _ThreadRun(IAudioClient* audio_client,
-        IAudioCaptureClient* audio_capture_client);
-};
-
-#endif

+ 0 - 226
AvRecorder/test/capturer/dxgi_capturer.cpp

@@ -1,226 +0,0 @@
-#include "dxgi_capturer.h"
-#include <windows.h>
-
-DxgiCapturer::DxgiCapturer()
-{
-    ZeroMemory(&_desc, sizeof(_desc));
-}
-
-DxgiCapturer::~DxgiCapturer()
-{
-    Close();
-}
-
-bool DxgiCapturer::Open(int left, int top, int width, int height)
-{
-    Close();
-    HRESULT hr = S_OK;
-    _isAttached = false;
-
-    if (_bInit) {
-        return false;
-    }
-
-    // Driver types supported
-    D3D_DRIVER_TYPE DriverTypes[] = {
-        D3D_DRIVER_TYPE_HARDWARE,
-        D3D_DRIVER_TYPE_WARP,
-        D3D_DRIVER_TYPE_REFERENCE,
-    };
-    UINT NumDriverTypes = ARRAYSIZE(DriverTypes);
-
-    // Feature levels supported
-    D3D_FEATURE_LEVEL FeatureLevels[] = {
-        D3D_FEATURE_LEVEL_11_0,
-        D3D_FEATURE_LEVEL_10_1,
-        D3D_FEATURE_LEVEL_10_0,
-        D3D_FEATURE_LEVEL_9_1};
-    UINT NumFeatureLevels = ARRAYSIZE(FeatureLevels);
-
-    D3D_FEATURE_LEVEL FeatureLevel;
-
-    // Create D3D device
-    for (UINT DriverTypeIndex = 0; DriverTypeIndex < NumDriverTypes; ++DriverTypeIndex) {
-        hr = D3D11CreateDevice(nullptr, DriverTypes[DriverTypeIndex], nullptr, 0, FeatureLevels,
-            NumFeatureLevels, D3D11_SDK_VERSION, &_hDevice, &FeatureLevel, &_hContext);
-        if (SUCCEEDED(hr)) {
-            break;
-        }
-    }
-    __CheckBool(SUCCEEDED(hr));
-
-    // Get DXGI device
-    IDXGIDevice* hDxgiDevice = nullptr;
-    __CheckBool(SUCCEEDED(_hDevice->QueryInterface(__uuidof(IDXGIDevice), reinterpret_cast<void**>(&hDxgiDevice))));
-
-    // Get DXGI adapter
-    IDXGIAdapter* hDxgiAdapter = nullptr;
-    hr = hDxgiDevice->GetParent(__uuidof(IDXGIAdapter), reinterpret_cast<void**>(&hDxgiAdapter));
-    Free(hDxgiDevice, [=] { hDxgiDevice->Release(); });
-    __CheckBool(SUCCEEDED(hr));
-
-    // Get output
-    INT nOutput = 0;
-    IDXGIOutput* hDxgiOutput = nullptr;
-    DXGI_OUTPUT_DESC dxgiOutDesc;
-    ZeroMemory(&dxgiOutDesc, sizeof(dxgiOutDesc));
-
-    for (int idx = 0; SUCCEEDED(hr = hDxgiAdapter->EnumOutputs(idx, &hDxgiOutput)); ++idx) {
-        // get output description struct
-        hDxgiOutput->GetDesc(&dxgiOutDesc);
-        if (dxgiOutDesc.DesktopCoordinates.left == left
-            && dxgiOutDesc.DesktopCoordinates.top == top) { // 寻找显示器
-            break;
-        }
-    }
-    Free(hDxgiAdapter, [=] { hDxgiAdapter->Release(); });
-    __CheckBool(SUCCEEDED(hr));
-
-    // QI for Output 1
-    IDXGIOutput1* hDxgiOutput1 = nullptr;
-    hr = hDxgiOutput->QueryInterface(__uuidof(hDxgiOutput1), reinterpret_cast<void**>(&hDxgiOutput1));
-    Free(hDxgiOutput, [=] { hDxgiOutput->Release(); });
-    __CheckBool(SUCCEEDED(hr));
-
-    // Create desktop duplication
-    hr = hDxgiOutput1->DuplicateOutput(_hDevice, &_hDeskDupl);
-    Free(hDxgiOutput1, [=] { hDxgiOutput1->Release(); });
-    __CheckBool(SUCCEEDED(hr));
-
-    // Set ColorSpace
-    D3D11_VIDEO_PROCESSOR_COLOR_SPACE inputColorSpace;
-    inputColorSpace.Usage = 1;
-    inputColorSpace.RGB_Range = 0;
-    inputColorSpace.YCbCr_Matrix = 1;
-    inputColorSpace.YCbCr_xvYCC = 0;
-    inputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_0_255;
-
-    D3D11_VIDEO_PROCESSOR_COLOR_SPACE outputColorSpace;
-    outputColorSpace.Usage = 0;
-    outputColorSpace.RGB_Range = 0;
-    outputColorSpace.YCbCr_Matrix = 1;
-    outputColorSpace.YCbCr_xvYCC = 0;
-    outputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_16_235;
-    _rgbToNv12.Open(_hDevice, _hContext, inputColorSpace, outputColorSpace);
-    _nv12Frame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12, width, height);
-    _xrgbFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_BGR0, width, height);
-    __CheckBool(_nv12Frame);
-    __CheckBool(_xrgbFrame);
-    // 初始化成功
-    _bInit = true;
-    return true;
-}
-void DxgiCapturer::Close()
-{
-    if (!_bInit) {
-        return;
-    }
-
-    _bInit = false;
-    _nv12Buffers.Clear();
-    _xrgbBuffers.Clear();
-    _rgbToNv12.Close();
-    Free(_nv12Frame, [this] { av_frame_free(&_nv12Frame); });
-    Free(_xrgbFrame, [this] { av_frame_free(&_xrgbFrame); });
-    Free(_hDeskDupl, [this] { _hDeskDupl->Release(); });
-    Free(_hDevice, [this] { _hDevice->Release(); });
-    Free(_hContext, [this] { _hContext->Release(); });
-}
-
-HDC DxgiCapturer::GetHdc()
-{
-    _isCaptureSuccess = false;
-    if (!_bInit) {
-        return nullptr;
-    }
-
-    IDXGIResource* hDesktopResource = nullptr;
-    DXGI_OUTDUPL_FRAME_INFO FrameInfo;
-    HRESULT hr = _hDeskDupl->AcquireNextFrame(0, &FrameInfo, &hDesktopResource);
-    if (FAILED(hr)) {
-        if (hr == DXGI_ERROR_WAIT_TIMEOUT) { // 这里是因为当桌面没有动画更新时就会有一个错误值,不进行错误打印
-            return nullptr;
-        }
-        return nullptr;
-    }
-
-    // query next frame staging buffer
-    ID3D11Texture2D* srcImage = nullptr;
-    hr = hDesktopResource->QueryInterface(__uuidof(ID3D11Texture2D), reinterpret_cast<void**>(&srcImage));
-    Free(hDesktopResource, [=] { hDesktopResource->Release(); });
-    __CheckNullptr(SUCCEEDED(hr));
-
-    srcImage->GetDesc(&_desc);
-
-    // create a new staging buffer for fill frame image
-    auto desc = _desc;
-    desc.ArraySize = 1;
-    desc.BindFlags = D3D11_BIND_FLAG::D3D11_BIND_RENDER_TARGET;
-    desc.MiscFlags = D3D11_RESOURCE_MISC_GDI_COMPATIBLE;
-    desc.SampleDesc.Count = 1;
-    desc.SampleDesc.Quality = 0;
-    desc.MipLevels = 1;
-    desc.CPUAccessFlags = 0;
-    desc.Usage = D3D11_USAGE_DEFAULT;
-    hr = _hDevice->CreateTexture2D(&desc, nullptr, &_gdiImage);
-    if (FAILED(hr)) {
-        __DebugPrint("Create _gdiImage failed");
-        Free(srcImage, [=] { srcImage->Release(); });
-        Free(_hDeskDupl, [this] { _hDeskDupl->ReleaseFrame(); });
-        return nullptr;
-    }
-
-    // copy next staging buffer to new staging buffer
-    _hContext->CopyResource(_gdiImage, srcImage);
-    Free(srcImage, [=] { srcImage->Release(); });
-    _hDeskDupl->ReleaseFrame();
-
-    // create staging buffer for map bits
-    _hStagingSurf = nullptr;
-    hr = _gdiImage->QueryInterface(__uuidof(IDXGISurface), (void**)(&_hStagingSurf));
-    if (FAILED(hr)) {
-        __DebugPrint("_gdiImage->QueryInterface failed");
-        Free(_gdiImage, [this] { _gdiImage->Release(); });
-        return nullptr;
-    }
-
-    _isCaptureSuccess = true;
-    HDC hdc = nullptr;
-    // if GetDc is failed, the hdc is nullptr
-    _hStagingSurf->GetDC(FALSE, &hdc);
-    return hdc;
-}
-
-AVFrame* DxgiCapturer::GetFrame()
-{
-    if (!_isCaptureSuccess) {
-        return nullptr;
-    }
-    _isCaptureSuccess = false;
-    _hStagingSurf->ReleaseDC(nullptr);
-
-    // 创建一个临时的纹理
-    ID3D11Texture2D* tmpImage = nullptr;
-    _desc.MiscFlags = 2050;
-    __CheckNullptr(SUCCEEDED(_hDevice->CreateTexture2D(&_desc, nullptr, &tmpImage)));
-    _hContext->CopyResource(tmpImage, _gdiImage);
-
-    // 首先尝试创建 NV12 纹理
-    AVFrame* frame = nullptr;
-    auto tmpFormat = _desc.Format;
-    _desc.Format = DXGI_FORMAT_NV12;
-    if (GenNv12Frame(_hDevice, _hContext, _desc, tmpImage,
-            _nv12Buffers, _nv12Frame, _rgbToNv12)) {
-        frame = _nv12Frame;
-    } else {
-        _desc.Format = tmpFormat;
-        GenRgbFrame(_hDevice, _hContext, _desc, _gdiImage,
-            _xrgbBuffers, _xrgbFrame);
-        frame = _xrgbFrame;
-    }
-    Free(_hStagingSurf, [this] { _hStagingSurf->Release(); });
-    Free(tmpImage, [&tmpImage] { tmpImage->Release(); });
-    Free(_gdiImage, [this] { _gdiImage->Release(); });
-
-    return frame;
-}

+ 0 - 39
AvRecorder/test/capturer/dxgi_capturer.h

@@ -1,39 +0,0 @@
-#ifndef __DXGI_CAPTURER_H__
-#define __DXGI_CAPTURER_H__
-
-#include <d3d11.h>
-#include <dxgi1_2.h>
-
-#include "d3d/gen_frame.h"
-class DxgiCapturer {
-public:
-    DxgiCapturer();
-    ~DxgiCapturer();
-
-public:
-    bool Open(int left, int top, int width, int height);
-    void Close();
-
-public:
-    HDC GetHdc();
-    AVFrame* GetFrame();
-
-private:
-    bool _bInit = false;
-    bool _isCaptureSuccess = false;
-
-    ID3D11Device* _hDevice = nullptr;
-    ID3D11DeviceContext* _hContext = nullptr;
-    IDXGIOutputDuplication* _hDeskDupl = nullptr;
-    IDXGISurface1* _hStagingSurf = nullptr;
-    ID3D11Texture2D* _gdiImage = nullptr;
-    D3D11_TEXTURE2D_DESC _desc;
-    bool _isAttached = false;
-    AVFrame* _xrgbFrame = nullptr;
-    AVFrame* _nv12Frame = nullptr;
-    BufferFiller _xrgbBuffers;
-    BufferFiller _nv12Buffers;
-    D3dConverter _rgbToNv12;
-};
-
-#endif

+ 0 - 98
AvRecorder/test/capturer/finder.cpp

@@ -1,98 +0,0 @@
-#include "finder.h"
-
-#include <Windows.h>
-#include <array>
-
-const std::vector<WindowFinder::Info>& WindowFinder::GetList(bool isUpdate)
-{
-    if (!isUpdate) {
-        return _list;
-    }
-    _list.clear();
-    EnumWindows(_EnumWindowsProc, (LPARAM) nullptr);
-    return _list;
-}
-
-std::vector<WindowFinder::Info> WindowFinder::_list;
-
-std::wstring WindowFinder::_GetWindowTextStd(HWND hwnd)
-{
-    std::array<WCHAR, 1024> windowText;
-    ::GetWindowTextW(hwnd, windowText.data(), (int)windowText.size());
-    std::wstring title(windowText.data());
-    return title;
-}
-BOOL CALLBACK WindowFinder::_EnumWindowsProc(HWND hwnd, LPARAM lParam)
-{
-    auto title = _GetWindowTextStd(hwnd);
-    if (!IsAltTabWindow(hwnd, title)) {
-        return TRUE;
-    }
-    _list.push_back({hwnd, std::move(title)});
-    return TRUE;
-}
-
-bool WindowFinder::IsAltTabWindow(HWND hwnd, const std::wstring& title)
-{
-    HWND shellWindow = GetShellWindow();
-
-    if (hwnd == shellWindow) {
-        return false;
-    }
-
-    if (title.length() == 0 || title == L"NVIDIA GeForce Overlay") {
-        return false;
-    }
-
-    if (!IsWindowVisible(hwnd)) {
-        return false;
-    }
-
-    if (GetAncestor(hwnd, GA_ROOT) != hwnd) {
-        return false;
-    }
-
-    LONG style = GetWindowLong(hwnd, GWL_STYLE);
-    if (!((style & WS_DISABLED) != WS_DISABLED)) {
-        return false;
-    }
-
-    DWORD cloaked = FALSE;
-    HRESULT hrTemp = DwmGetWindowAttribute(hwnd, DWMWA_CLOAKED, &cloaked, sizeof(cloaked));
-    if (SUCCEEDED(hrTemp) && cloaked == DWM_CLOAKED_SHELL) {
-        return false;
-    }
-
-    return !IsIconic(hwnd);
-}
-
-const std::vector<MonitorFinder::Info>& MonitorFinder::GetList(bool isUpdate)
-{
-    if (!isUpdate) {
-        return _list;
-    }
-    _list.clear();
-    EnumDisplayMonitors(nullptr, nullptr, _MonitorEnumProc, (LPARAM) nullptr);
-    return _list;
-}
-
-std::vector<MonitorFinder::Info> MonitorFinder::_list;
-
-BOOL CALLBACK MonitorFinder::_MonitorEnumProc(
-    HMONITOR hMonitor,  // handle to display monitor
-    HDC hdcMonitor,     // handle to monitor-appropriate device context
-    LPRECT lprcMonitor, // pointer to monitor intersection rectangle
-    LPARAM dwData       // data passed from EnumDisplayMonitors
-)
-{
-    std::wstring name = L"显示器" + std::to_wstring(_list.size() + 1);
-    MONITORINFO monitorInfo;
-    monitorInfo.cbSize = sizeof(monitorInfo);
-    GetMonitorInfoW(hMonitor, &monitorInfo);
-    Info info;
-    info.monitor = hMonitor;
-    info.rect = monitorInfo.rcMonitor;
-    info.title = std::move(name);
-    _list.push_back(std::move(info));
-    return TRUE;
-}

+ 0 - 43
AvRecorder/test/capturer/finder.h

@@ -1,43 +0,0 @@
-#pragma once
-
-#include <dwmapi.h>
-#include <string>
-#include <vector>
-#include <d3d11.h>
-
-class WindowFinder {
-public:
-    struct Info {
-        HWND hwnd = nullptr;
-        std::wstring title;
-    };
-
-    static const std::vector<Info>& GetList(bool isUpdate = false);
-
-private:
-    static std::vector<Info> _list;
-    static std::wstring _GetWindowTextStd(HWND hwnd);
-    static BOOL CALLBACK _EnumWindowsProc(HWND hwnd, LPARAM lParam);
-    static bool IsAltTabWindow(HWND hwnd, const std::wstring& title);
-};
-
-class MonitorFinder {
-public:
-    struct Info {
-        HMONITOR monitor = nullptr;
-        std::wstring title;
-        RECT rect;
-    };
-
-    static const std::vector<Info>& GetList(bool isUpdate = false);
-
-private:
-    static std::vector<Info> _list;
-
-    static BOOL CALLBACK _MonitorEnumProc(
-        HMONITOR hMonitor,  // handle to display monitor
-        HDC hdcMonitor,     // handle to monitor-appropriate device context
-        LPRECT lprcMonitor, // pointer to monitor intersection rectangle
-        LPARAM dwData       // data passed from EnumDisplayMonitors
-    );
-};

+ 0 - 56
AvRecorder/test/capturer/gdi_capturer.cpp

@@ -1,56 +0,0 @@
-
-#include "gdi_capturer.h"
-#include "basic/basic.h"
-
-bool GdiCapturer::Open(HWND hwnd, int width, int height)
-{
-    Close();
-    m_width = width;
-    m_height = height;
-    _srcHdc = GetWindowDC(hwnd);
-    _dstHdc = CreateCompatibleDC(_srcHdc);
-    _bitmap = CreateCompatibleBitmap(_srcHdc, width, height);
-    SelectObject(_dstHdc, _bitmap);
-
-    _bitmapInfo.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
-    _bitmapInfo.bmiHeader.biPlanes = 1;
-    _bitmapInfo.bmiHeader.biBitCount = 24;
-    _bitmapInfo.bmiHeader.biWidth = width;
-    _bitmapInfo.bmiHeader.biHeight = height;
-    _bitmapInfo.bmiHeader.biCompression = BI_RGB;
-    _bitmapInfo.bmiHeader.biSizeImage = width * height;
-
-    // 创建缓存帧
-    _frame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_BGR24, width, height);
-    return true;
-}
-
-HDC GdiCapturer::GetHdc(int borderWidth, int borderHeight)
-{
-    __CheckNullptr(
-        BitBlt(_dstHdc, 0, 0, m_width, m_height,
-            _srcHdc, borderWidth / 2, borderHeight - borderWidth / 2, SRCCOPY));
-
-    return _dstHdc;
-}
-
-AVFrame* GdiCapturer::GetFrame()
-{
-    auto linesize = _frame->linesize[0];
-    for (int row = 0; row < m_height; ++row) {
-        __CheckNullptr(GetDIBits(_dstHdc, _bitmap, m_height - 1 - row, 1, _frame->data[0] + row * linesize, &_bitmapInfo, DIB_RGB_COLORS));
-    }
-    return _frame;
-}
-
-void GdiCapturer::Close()
-{
-    Free(_frame, [this] { av_frame_free(&_frame); });
-    Free(_dstHdc, [this] { DeleteObject(_dstHdc); });
-    Free(_bitmap, [this] { DeleteObject(_bitmap); });
-}
-
-GdiCapturer::~GdiCapturer()
-{
-    Close();
-}

+ 0 - 26
AvRecorder/test/capturer/gdi_capturer.h

@@ -1,26 +0,0 @@
-#ifndef __GDI_CAPTURER_H__
-#define __GDI_CAPTURER_H__
-
-#include "basic/frame.h"
-
-#include <Windows.h>
-
-class GdiCapturer {
-public:
-    bool Open(HWND hwnd, int width, int height);
-    HDC GetHdc(int borderWidth, int borderHeight);
-    AVFrame* GetFrame();
-    void Close();
-    ~GdiCapturer();
-
-private:
-    HDC _srcHdc = nullptr;
-    HDC _dstHdc = nullptr;
-    HBITMAP _bitmap = nullptr;
-    BITMAPINFO _bitmapInfo;
-    int m_width = 0;
-    int m_height = 0;
-    AVFrame* _frame = nullptr;
-};
-
-#endif

+ 0 - 145
AvRecorder/test/capturer/video_capturer.cpp

@@ -1,145 +0,0 @@
-#include "video_capturer.h"
-#include "capturer/finder.h"
-
-bool VideoCapturer::Open(HWND hwnd, Method method)
-{
-    Close();
-    __CheckBool(hwnd);
-    m_srcHwnd = hwnd;
-    __CheckBool(_GetHwndSize(m_srcHwnd));
-    m_usingMethod = method;
-    m_type = WINDOW;
-    switch (method) {
-    case WGC: {
-        m_wgcCapturer = WgcCapturer::New();
-        __CheckBool(m_wgcCapturer->StartCapturerWindow(hwnd, m_width, m_height));
-        break;
-    }
-
-    default: { // GDI
-        m_gdiCapturer = new GdiCapturer;
-        __CheckBool(m_gdiCapturer->Open(hwnd, m_width, m_height));
-        break;
-    }
-    }
-
-    return true;
-}
-
-bool VideoCapturer::Open(int monitorIdx, Method method)
-{
-    Close();
-    auto&& monitorInfo = MonitorFinder::GetList()[monitorIdx];
-    m_rect = monitorInfo.rect;
-    m_borderHeight = 0;
-    m_borderWidth = 0;
-    m_width = m_rect.right - m_rect.left;
-    m_height = m_rect.bottom - m_rect.top;
-    m_usingMethod = method;
-    m_type = MONITOR;
-    switch (method) {
-    case WGC: {
-        auto monitor = monitorInfo.monitor;
-        m_wgcCapturer = WgcCapturer::New();
-        __CheckBool(m_wgcCapturer->StartCapturerMonitor(monitor, m_width, m_height));
-        break;
-    }
-
-    default: { // DXGI
-        m_dxgiCapturer = new DxgiCapturer;
-        __CheckBool(m_dxgiCapturer->Open(m_rect.left, m_rect.top, m_width, m_height));
-        break;
-    }
-    }
-    return true;
-}
-
-AVFrame* VideoCapturer::GetFrame()
-{
-    switch (m_usingMethod) {
-    case WGC: // 该捕获方式自动就将鼠标画好了,我们不需要再自己画鼠标
-        return m_wgcCapturer->GetFrame();
-    case DXGI: {
-        auto hdc = m_dxgiCapturer->GetHdc();
-        if (m_isDrawCursor && hdc) {
-            _DrawCursor(hdc);
-        }
-        return m_dxgiCapturer->GetFrame();
-    }
-    default: // GDI
-        auto hdc = m_gdiCapturer->GetHdc(m_borderWidth, m_borderHeight);
-        if (m_isDrawCursor && hdc) {
-            _DrawCursor(hdc);
-        }
-        return m_gdiCapturer->GetFrame();
-    }
-}
-
-void VideoCapturer::SetDrawCursor(bool isDrawCursor)
-{
-    m_isDrawCursor = isDrawCursor;
-    if (m_usingMethod == WGC) {
-        m_wgcCapturer->SetDrawCursor(m_isDrawCursor);
-    }
-}
-
-void VideoCapturer::Close()
-{
-    Free(m_dxgiCapturer, [this] { m_dxgiCapturer->Close(); delete m_dxgiCapturer; });
-    Free(m_gdiCapturer, [this] { m_gdiCapturer->Close(); delete m_gdiCapturer; });
-    Free(m_wgcCapturer, [this] { m_wgcCapturer->Close(); });
-}
-
-VideoCapturer::~VideoCapturer()
-{
-    Close();
-}
-
-int VideoCapturer::GetWidth() const
-{
-    return m_width;
-}
-int VideoCapturer::GetHeight() const
-{
-    return m_height;
-}
-
-bool VideoCapturer::_GetHwndSize(HWND hwnd)
-{
-    RECT rect;
-    __CheckBool(GetClientRect(hwnd, &rect));
-    m_rect = rect;
-    m_width = (rect.right - rect.left);
-    m_height = (rect.bottom - rect.top);
-    __CheckBool(GetWindowRect(hwnd, &rect));
-    m_borderHeight = rect.bottom - rect.top - m_height;
-    m_borderWidth = rect.right - rect.left - m_width;
-    if (m_borderHeight < 0) {
-        m_borderHeight = 0;
-    }
-    if (m_borderWidth < 0) {
-        m_borderWidth = 0;
-    }
-    return true;
-}
-
-void VideoCapturer::_DrawCursor(HDC hdc)
-{
-    CURSORINFO ci;
-    ci.cbSize = sizeof(CURSORINFO);
-    __CheckNo(GetCursorInfo(&ci));
-    int cursorX = ci.ptScreenPos.x;
-    int cursorY = ci.ptScreenPos.y;
-
-    if (cursorX > m_rect.right || cursorX < m_rect.left
-        || cursorY > m_rect.bottom || cursorY < m_rect.top) {
-        return; // 超出显示范围
-    }
-
-    if (ci.flags == CURSOR_SHOWING) {
-        // 将光标画到屏幕所在位置
-        int x = cursorX - m_rect.left;
-        int y = cursorY - m_rect.top;
-        __CheckNo(DrawIconEx(hdc, x, y, ci.hCursor, 0, 0, 0, NULL, DI_NORMAL | DI_COMPAT));
-    }
-}

+ 0 - 46
AvRecorder/test/capturer/video_capturer.h

@@ -1,46 +0,0 @@
-#ifndef __AV_CAPTURER_H__
-#define __AV_CAPTURER_H__
-
-#include "dxgi_capturer.h"
-#include "gdi_capturer.h"
-#include "wgc_capturer.h"
-
-class VideoCapturer {
-public:
-    enum Method {
-        GDI,
-        DXGI,
-        WGC
-    };
-
-    enum Type {
-        WINDOW,
-        MONITOR
-    };
-    ~VideoCapturer();
-    bool Open(HWND hwnd, Method method);
-    bool Open(int monitorIdx, Method method);
-    AVFrame* GetFrame();
-    void SetDrawCursor(bool isDrawCursor);
-    void Close();
-    int GetWidth() const;
-    int GetHeight() const;
-    Method GetMethod() const { return m_usingMethod; }
-
-private:
-    bool _GetHwndSize(HWND hwnd);
-    void _DrawCursor(HDC hdc);
-    Method m_usingMethod = WGC;
-    RECT m_rect;
-    Type m_type = MONITOR;
-    DxgiCapturer* m_dxgiCapturer = nullptr;
-    GdiCapturer* m_gdiCapturer = nullptr;
-    WgcCapturer* m_wgcCapturer = nullptr;
-    int m_width = 0;
-    int m_height = 0;
-    int m_borderHeight = 0;
-    int m_borderWidth = 0;
-    HWND m_srcHwnd = nullptr;
-    bool m_isDrawCursor = true;
-};
-#endif

+ 0 - 88
AvRecorder/test/capturer/wgc/App.cpp

@@ -1,88 +0,0 @@
-#include "App.h"
-// D3D
-#include <d2d1_3.h>
-#include <d3d11_4.h>
-#include <dxgi1_6.h>
-#include <wincodec.h>
-
-#include "pch.h"
-
-#include "basic/frame.h"
-
-using namespace winrt;
-using namespace Windows::System;
-using namespace Windows::Foundation;
-using namespace Windows::UI;
-using namespace Windows::UI::Composition;
-using namespace Windows::Graphics::Capture;
-
-void App::Initialize(ContainerVisual const& root)
-{
-    auto queue = DispatcherQueue::GetForCurrentThread();
-
-    m_compositor = root.Compositor();
-    m_root = m_compositor.CreateContainerVisual();
-    m_content = m_compositor.CreateSpriteVisual();
-    m_brush = m_compositor.CreateSurfaceBrush();
-
-    m_root.RelativeSizeAdjustment({1, 1});
-    root.Children().InsertAtTop(m_root);
-
-    m_content.AnchorPoint({0.5f, 0.5f});
-    m_content.RelativeOffsetAdjustment({0.5f, 0.5f, 0});
-    m_content.RelativeSizeAdjustment({1, 1});
-    m_content.Size({-80, -80});
-    m_content.Brush(m_brush);
-    m_brush.HorizontalAlignmentRatio(0.5f);
-    m_brush.VerticalAlignmentRatio(0.5f);
-    m_brush.Stretch(CompositionStretch::Uniform);
-    auto shadow = m_compositor.CreateDropShadow();
-    shadow.Mask(m_brush);
-    m_content.Shadow(shadow);
-    m_root.Children().InsertAtTop(m_content);
-
-    auto d3dDevice = CreateD3DDevice();
-    auto dxgiDevice = d3dDevice.as<IDXGIDevice>();
-    m_device = CreateDirect3DDevice(dxgiDevice.get());
-}
-
-void App::Close()
-{
-    if (m_capture) {
-        m_capture->Close();
-        delete m_capture;
-        m_capture = nullptr;
-    }
-}
-
-bool App::StartCaptureWindow(HWND hwnd, int width, int height)
-{
-    Close();
-    auto item = CreateCaptureItemForWindow(hwnd);
-    __CheckBool(item);
-    m_capture = new SimpleCapture(m_device, item, width, height);
-    auto surface = m_capture->CreateSurface(m_compositor);
-    m_brush.Surface(surface);
-    m_capture->StartCapture();
-    return true;
-}
-
-void App::SetDrawCursor(bool isDrawCursor)
-{
-    if (m_capture == nullptr) {
-        return;
-    }
-    m_capture->SetDrawCursor(isDrawCursor);
-}
-
-bool App::StartCaptureMonitor(HMONITOR monitor, int width, int height)
-{
-    Close();
-    auto item = CreateCaptureItemForMonitor(monitor);
-    __CheckBool(item);
-    m_capture = new SimpleCapture(m_device, item, width, height);
-    auto surface = m_capture->CreateSurface(m_compositor);
-    m_brush.Surface(surface);
-    m_capture->StartCapture();
-    return true;
-}

+ 0 - 46
AvRecorder/test/capturer/wgc/App.h

@@ -1,46 +0,0 @@
-#pragma once
-
-#include <guiddef.h>
-// WinRT
-
-#include <winrt/Windows.Foundation.Numerics.h>
-#include <winrt/Windows.Foundation.h>
-#include <winrt/Windows.Graphics.Capture.h>
-#include <winrt/Windows.Graphics.DirectX.Direct3d11.h>
-#include <winrt/Windows.Graphics.DirectX.h>
-#include <winrt/Windows.Graphics.Imaging.h>
-#include <winrt/Windows.Storage.Streams.h>
-#include <winrt/Windows.Storage.h>
-#include <winrt/Windows.System.h>
-#include <winrt/Windows.UI.Composition.Desktop.h>
-#include <winrt/Windows.UI.Composition.h>
-#include <winrt/Windows.UI.Popups.h>
-#include <winrt/Windows.UI.h>
-
-#include <winrt/Windows.Foundation.Collections.h>
-
-#include "SimpleCapture.h"
-
-class App {
-public:
-    App() { }
-    ~App() { }
-
-    void Initialize(
-        winrt::Windows::UI::Composition::ContainerVisual const& root);
-
-    bool StartCaptureWindow(HWND hwnd, int width, int height);
-    bool StartCaptureMonitor(HMONITOR monitor, int width, int height);
-    void SetDrawCursor(bool isDrawCursor);
-    void Close();
-    AVFrame* GetFrame() { return m_capture->GetFrame(); }
-
-private:
-    winrt::Windows::UI::Composition::Compositor m_compositor {nullptr};
-    winrt::Windows::UI::Composition::ContainerVisual m_root {nullptr};
-    winrt::Windows::UI::Composition::SpriteVisual m_content {nullptr};
-    winrt::Windows::UI::Composition::CompositionSurfaceBrush m_brush {nullptr};
-
-    winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice m_device {nullptr};
-    SimpleCapture* m_capture = nullptr;
-};

+ 0 - 175
AvRecorder/test/capturer/wgc/SimpleCapture.cpp

@@ -1,175 +0,0 @@
-
-// D3D
-#include <d3d11_4.h>
-#include <dxgi1_6.h>
-#include <d2d1_3.h>
-#include <wincodec.h>
-
-#include "pch.h"
-#include "SimpleCapture.h"
-#include "basic/basic.h"
-
-using namespace winrt;
-using namespace Windows;
-using namespace Windows::Foundation;
-using namespace Windows::System;
-using namespace Windows::Graphics;
-using namespace Windows::Graphics::Capture;
-using namespace Windows::Graphics::DirectX;
-using namespace Windows::Graphics::DirectX::Direct3D11;
-using namespace Windows::Foundation::Numerics;
-using namespace Windows::UI;
-using namespace Windows::UI::Composition;
-
-#undef min
-#undef max
-
-SimpleCapture::SimpleCapture(
-    IDirect3DDevice const& device,
-    GraphicsCaptureItem const& item,
-    int width, int height)
-{
-    m_item = item;
-    m_device = device;
-
-    // Set up
-    auto d3dDevice = GetDXGIInterfaceFromObject<ID3D11Device>(m_device);
-    d3dDevice->GetImmediateContext(m_d3dContext.put());
-    auto size = m_item.Size();
-
-    m_swapChain = CreateDXGISwapChain(
-        d3dDevice,
-        static_cast<uint32_t>(size.Width),
-        static_cast<uint32_t>(size.Height),
-        static_cast<DXGI_FORMAT>(DirectXPixelFormat::B8G8R8A8UIntNormalized),
-        2);
-
-    // Create framepool, define pixel format (DXGI_FORMAT_B8G8R8A8_UNORM), and frame size.
-    m_framePool = Direct3D11CaptureFramePool::Create(
-        m_device,
-        DirectXPixelFormat::B8G8R8A8UIntNormalized,
-        2,
-        size);
-
-    m_session = m_framePool.CreateCaptureSession(m_item);
-    m_lastSize = size;
-    m_frameArrived = m_framePool.FrameArrived(auto_revoke, {this, &SimpleCapture::OnFrameArrived});
-
-    // Set ColorSpace
-    D3D11_VIDEO_PROCESSOR_COLOR_SPACE inputColorSpace;
-    inputColorSpace.Usage = 1;
-    inputColorSpace.RGB_Range = 0;
-    inputColorSpace.YCbCr_Matrix = 1;
-    inputColorSpace.YCbCr_xvYCC = 0;
-    inputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_0_255;
-
-    D3D11_VIDEO_PROCESSOR_COLOR_SPACE outputColorSpace;
-    outputColorSpace.Usage = 0;
-    outputColorSpace.RGB_Range = 0;
-    outputColorSpace.YCbCr_Matrix = 1;
-    outputColorSpace.YCbCr_xvYCC = 0;
-    outputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_16_235;
-    m_rgbToNv12.Open(d3dDevice.get(), m_d3dContext.get(), inputColorSpace, outputColorSpace);
-    m_nv12Frame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12, width, height);
-    m_xrgbFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_BGR0, width, height);
-    __CheckNo(m_nv12Frame);
-    __CheckNo(m_xrgbFrame);
-    m_isCapture = true;
-    m_cnt = 5;
-}
-
-// Start sending capture frames
-void SimpleCapture::StartCapture()
-{
-    CheckClosed();
-    m_session.StartCapture();
-}
-
-ICompositionSurface SimpleCapture::CreateSurface(
-    Compositor const& compositor)
-{
-    CheckClosed();
-    return CreateCompositionSurfaceForSwapChain(compositor, m_swapChain.get());
-}
-
-// Process captured frames
-void SimpleCapture::Close()
-{
-    auto expected = false;
-    if (m_closed.compare_exchange_strong(expected, true)) {
-        m_frameArrived.revoke();
-        m_framePool.Close();
-        m_session.Close();
-        m_swapChain = nullptr;
-        m_framePool = nullptr;
-        m_session = nullptr;
-        m_item = nullptr;
-    }
-    m_nv12Buffers.Clear();
-    m_xrgbBuffers.Clear();
-    m_rgbToNv12.Close();
-    Free(m_nv12Frame, [this] { av_frame_free(&m_nv12Frame); });
-    Free(m_xrgbFrame, [this] { av_frame_free(&m_xrgbFrame); });
-}
-
-void SimpleCapture::OnFrameArrived(
-    Direct3D11CaptureFramePool const& sender,
-    winrt::Windows::Foundation::IInspectable const&)
-{
-    auto newSize = false;
-    auto frame = sender.TryGetNextFrame();
-    auto frameContentSize = frame.ContentSize();
-    if (frameContentSize.Width != m_lastSize.Width || frameContentSize.Height != m_lastSize.Height) {
-        // The thing we have been capturing has changed size.
-        // We need to resize our swap chain first, then blit the pixels.
-        // After we do that, retire the frame and then recreate our frame pool.
-        newSize = true;
-        m_lastSize = frameContentSize;
-        m_swapChain->ResizeBuffers(
-            2,
-            static_cast<uint32_t>(m_lastSize.Width),
-            static_cast<uint32_t>(m_lastSize.Height),
-            static_cast<DXGI_FORMAT>(DirectXPixelFormat::B8G8R8A8UIntNormalized),
-            0);
-        m_nv12Buffers.Clear();
-        m_xrgbBuffers.Clear();
-    }
-    if (m_cnt > 0) {
-        --m_cnt;
-    }
-    m_isCapture = (m_isCapture && !newSize) || m_cnt > 0;
-    if (m_isCapture) {
-        auto frameSurface = GetDXGIInterfaceFromObject<ID3D11Texture2D>(frame.Surface());
-        D3D11_TEXTURE2D_DESC desc;
-        frameSurface->GetDesc(&desc);
-        auto d3dDevice = GetDXGIInterfaceFromObject<ID3D11Device>(m_device);
-
-        // 首先尝试创建 NV12 纹理
-        auto tmpFormat = desc.Format;
-        desc.Format = DXGI_FORMAT_NV12;
-        if (GenNv12Frame(d3dDevice.get(), m_d3dContext.get(), desc, frameSurface.get(),
-                m_nv12Buffers, m_nv12Frame, m_rgbToNv12)) {
-            m_pixType = _PixType::NV12;
-        } else {
-            desc.Format = tmpFormat;
-            GenRgbFrame(d3dDevice.get(), m_d3dContext.get(), desc, frameSurface.get(),
-                m_xrgbBuffers, m_xrgbFrame);
-            m_pixType = _PixType::RGB;
-        }
-    }
-
-    // com_ptr<ID3D11Texture2D> backBuffer;
-    // check_hresult(m_swapChain->GetBuffer(0, guid_of<ID3D11Texture2D>(), backBuffer.put_void()));
-    // m_d3dContext->CopyResource(backBuffer.get(), m_bufferFiller.GetMap());
-
-    // DXGI_PRESENT_PARAMETERS presentParameters = {0};
-    // auto hr = m_swapChain->Present1(1, 0, &presentParameters);
-
-    if (newSize) {
-        m_framePool.Recreate(
-            m_device,
-            DirectXPixelFormat::B8G8R8A8UIntNormalized,
-            2,
-            m_lastSize);
-    }
-}

+ 0 - 62
AvRecorder/test/capturer/wgc/SimpleCapture.h

@@ -1,62 +0,0 @@
-
-#pragma once
-
-#include <chrono>
-#include "d3d/gen_frame.h"
-
-class SimpleCapture {
-public:
-    SimpleCapture(
-        winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice const& device,
-        winrt::Windows::Graphics::Capture::GraphicsCaptureItem const& item,
-        int width, int height);
-    ~SimpleCapture() { Close(); }
-
-    void StartCapture();
-    winrt::Windows::UI::Composition::ICompositionSurface CreateSurface(
-        winrt::Windows::UI::Composition::Compositor const& compositor);
-
-    void SetDrawCursor(bool isDrawCursor) { m_session.IsCursorCaptureEnabled(isDrawCursor); }
-
-    void Close();
-
-    AVFrame* GetFrame() const noexcept { return m_pixType == NV12 ? m_nv12Frame : m_xrgbFrame; }
-
-private:
-    void OnFrameArrived(
-        winrt::Windows::Graphics::Capture::Direct3D11CaptureFramePool const& sender,
-        winrt::Windows::Foundation::IInspectable const& args);
-
-    void CheckClosed()
-    {
-        if (m_closed.load() == true) {
-            throw winrt::hresult_error(RO_E_CLOSED);
-        }
-    }
-
-private:
-    enum _PixType {
-        NV12,
-        RGB
-    };
-
-    winrt::Windows::Graphics::Capture::GraphicsCaptureItem m_item {nullptr};
-    winrt::Windows::Graphics::Capture::Direct3D11CaptureFramePool m_framePool {nullptr};
-    winrt::Windows::Graphics::Capture::GraphicsCaptureSession m_session {nullptr};
-    winrt::Windows::Graphics::SizeInt32 m_lastSize;
-
-    winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice m_device {nullptr};
-    winrt::com_ptr<IDXGISwapChain1> m_swapChain {nullptr};
-    winrt::com_ptr<ID3D11DeviceContext> m_d3dContext {nullptr};
-
-    std::atomic<bool> m_closed = false;
-    winrt::Windows::Graphics::Capture::Direct3D11CaptureFramePool::FrameArrived_revoker m_frameArrived;
-    AVFrame* m_xrgbFrame = nullptr;
-    AVFrame* m_nv12Frame = nullptr;
-    BufferFiller m_xrgbBuffers;
-    BufferFiller m_nv12Buffers;
-    D3dConverter m_rgbToNv12;
-    _PixType m_pixType;
-    bool m_isCapture = true;
-    int m_cnt = 5;
-};

+ 0 - 24
AvRecorder/test/capturer/wgc/capture.interop.h

@@ -1,24 +0,0 @@
-#pragma once
-#include <guiddef.h>
-
-#include <winrt/Windows.Graphics.Capture.h>
-#include <windows.graphics.capture.interop.h>
-#include <windows.graphics.capture.h>
-
-inline auto CreateCaptureItemForWindow(HWND hwnd)
-{
-    auto activation_factory = winrt::get_activation_factory<winrt::Windows::Graphics::Capture::GraphicsCaptureItem>();
-    auto interop_factory = activation_factory.as<IGraphicsCaptureItemInterop>();
-    winrt::Windows::Graphics::Capture::GraphicsCaptureItem item = {nullptr};
-    interop_factory->CreateForWindow(hwnd, winrt::guid_of<ABI::Windows::Graphics::Capture::IGraphicsCaptureItem>(), reinterpret_cast<void**>(winrt::put_abi(item)));
-    return item;
-}
-
-inline auto CreateCaptureItemForMonitor(HMONITOR monitor)
-{
-    auto activation_factory = winrt::get_activation_factory<winrt::Windows::Graphics::Capture::GraphicsCaptureItem>();
-    auto interop_factory = activation_factory.as<IGraphicsCaptureItemInterop>();
-    winrt::Windows::Graphics::Capture::GraphicsCaptureItem item = {nullptr};
-    interop_factory->CreateForMonitor(monitor, winrt::guid_of<ABI::Windows::Graphics::Capture::IGraphicsCaptureItem>(), reinterpret_cast<void**>(winrt::put_abi(item)));
-    return item;
-}

+ 0 - 61
AvRecorder/test/capturer/wgc/composition.interop.h

@@ -1,61 +0,0 @@
-#pragma once
-#include <guiddef.h>
-
-#include <winrt/Windows.UI.Composition.h>
-#include <windows.ui.composition.interop.h>
-#include <d2d1_1.h>
-
-inline auto CreateCompositionGraphicsDevice(
-    winrt::Windows::UI::Composition::Compositor const& compositor,
-    ::IUnknown* device)
-{
-    winrt::Windows::UI::Composition::CompositionGraphicsDevice graphicsDevice{ nullptr };
-    auto compositorInterop = compositor.as<ABI::Windows::UI::Composition::ICompositorInterop>();
-    winrt::com_ptr<ABI::Windows::UI::Composition::ICompositionGraphicsDevice> graphicsInterop;
-    winrt::check_hresult(compositorInterop->CreateGraphicsDevice(device, graphicsInterop.put()));
-    winrt::check_hresult(graphicsInterop->QueryInterface(winrt::guid_of<winrt::Windows::UI::Composition::CompositionGraphicsDevice>(),
-        reinterpret_cast<void**>(winrt::put_abi(graphicsDevice))));
-    return graphicsDevice;
-}
-
-inline void ResizeSurface(
-    winrt::Windows::UI::Composition::CompositionDrawingSurface const& surface,
-    winrt::Windows::Foundation::Size const& size)
-{
-    auto surfaceInterop = surface.as<ABI::Windows::UI::Composition::ICompositionDrawingSurfaceInterop>();
-    SIZE newSize = {};
-    newSize.cx = static_cast<LONG>(std::round(size.Width));
-    newSize.cy = static_cast<LONG>(std::round(size.Height));
-    winrt::check_hresult(surfaceInterop->Resize(newSize));
-}
-
-inline auto SurfaceBeginDraw(
-    winrt::Windows::UI::Composition::CompositionDrawingSurface const& surface)
-{
-    auto surfaceInterop = surface.as<ABI::Windows::UI::Composition::ICompositionDrawingSurfaceInterop>();
-    winrt::com_ptr<ID2D1DeviceContext> context;
-    POINT offset = {};
-    winrt::check_hresult(surfaceInterop->BeginDraw(nullptr, __uuidof(ID2D1DeviceContext), context.put_void(), &offset));
-    context->SetTransform(D2D1::Matrix3x2F::Translation((FLOAT)offset.x,(FLOAT) offset.y));
-    return context;
-}
-
-inline void SurfaceEndDraw(
-    winrt::Windows::UI::Composition::CompositionDrawingSurface const& surface)
-{
-    auto surfaceInterop = surface.as<ABI::Windows::UI::Composition::ICompositionDrawingSurfaceInterop>();
-    winrt::check_hresult(surfaceInterop->EndDraw());
-}
-
-inline auto CreateCompositionSurfaceForSwapChain(
-    winrt::Windows::UI::Composition::Compositor const& compositor,
-    ::IUnknown* swapChain)
-{
-    winrt::Windows::UI::Composition::ICompositionSurface surface{ nullptr };
-    auto compositorInterop = compositor.as<ABI::Windows::UI::Composition::ICompositorInterop>();
-    winrt::com_ptr<ABI::Windows::UI::Composition::ICompositionSurface> surfaceInterop;
-    winrt::check_hresult(compositorInterop->CreateCompositionSurfaceForSwapChain(swapChain, surfaceInterop.put()));
-    winrt::check_hresult(surfaceInterop->QueryInterface(winrt::guid_of<winrt::Windows::UI::Composition::ICompositionSurface>(),
-        reinterpret_cast<void**>(winrt::put_abi(surface))));
-    return surface;
-}

+ 0 - 173
AvRecorder/test/capturer/wgc/d3dHelpers.h

@@ -1,173 +0,0 @@
-#pragma once
-
-#include "composition.interop.h"
-
-struct SurfaceContext
-{
-public:
-    SurfaceContext(std::nullptr_t) {}
-    SurfaceContext(
-        winrt::Windows::UI::Composition::CompositionDrawingSurface surface)
-    {
-        m_surface = surface;
-        m_d2dContext = SurfaceBeginDraw(m_surface);
-    }
-    ~SurfaceContext()
-    {
-        SurfaceEndDraw(m_surface);
-        m_d2dContext = nullptr;
-        m_surface = nullptr;
-    }
-
-    winrt::com_ptr<ID2D1DeviceContext> GetDeviceContext() { return m_d2dContext; }
-
-private:
-    winrt::com_ptr<ID2D1DeviceContext> m_d2dContext;
-    winrt::Windows::UI::Composition::CompositionDrawingSurface m_surface{ nullptr };
-};
-
-struct D3D11DeviceLock
-{
-public:
-    D3D11DeviceLock(std::nullopt_t) {}
-    D3D11DeviceLock(ID3D11Multithread* pMultithread)
-    {
-        m_multithread.copy_from(pMultithread);
-        m_multithread->Enter();
-    }
-    ~D3D11DeviceLock()
-    {
-        m_multithread->Leave();
-        m_multithread = nullptr;
-    }
-private:
-    winrt::com_ptr<ID3D11Multithread> m_multithread;
-};
-
-inline auto
-CreateWICFactory()
-{
-    winrt::com_ptr<IWICImagingFactory2> wicFactory;
-    winrt::check_hresult(
-        ::CoCreateInstance(
-            CLSID_WICImagingFactory,
-            nullptr,
-            CLSCTX_INPROC_SERVER,
-            winrt::guid_of<IWICImagingFactory>(),
-            wicFactory.put_void()));
-
-    return wicFactory;
-}
-
-inline auto
-CreateD2DDevice(
-    winrt::com_ptr<ID2D1Factory1> const& factory,
-    winrt::com_ptr<ID3D11Device> const& device)
-{
-    winrt::com_ptr<ID2D1Device> result;
-    winrt::check_hresult(factory->CreateDevice(device.as<IDXGIDevice>().get(), result.put()));
-    return result;
-}
-
-inline auto
-CreateD3DDevice(
-    D3D_DRIVER_TYPE const type,
-    winrt::com_ptr<ID3D11Device>& device)
-{
-    WINRT_ASSERT(!device);
-
-    UINT flags = D3D11_CREATE_DEVICE_BGRA_SUPPORT;
-
-//#ifdef _DEBUG
-//	flags |= D3D11_CREATE_DEVICE_DEBUG;
-//#endif
-
-    return D3D11CreateDevice(
-        nullptr,
-        type,
-        nullptr,
-        flags,
-        nullptr, 0,
-        D3D11_SDK_VERSION,
-        device.put(),
-        nullptr,
-        nullptr);
-}
-
-inline auto
-CreateD3DDevice()
-{
-    winrt::com_ptr<ID3D11Device> device;
-    HRESULT hr = CreateD3DDevice(D3D_DRIVER_TYPE_HARDWARE, device);
-
-    if (DXGI_ERROR_UNSUPPORTED == hr)
-    {
-        hr = CreateD3DDevice(D3D_DRIVER_TYPE_WARP, device);
-    }
-
-    winrt::check_hresult(hr);
-    return device;
-}
-
-inline auto
-CreateD2DFactory()
-{
-    D2D1_FACTORY_OPTIONS options{};
-
-//#ifdef _DEBUG
-//	options.debugLevel = D2D1_DEBUG_LEVEL_INFORMATION;
-//#endif
-
-    winrt::com_ptr<ID2D1Factory1> factory;
-
-    winrt::check_hresult(D2D1CreateFactory(
-        D2D1_FACTORY_TYPE_SINGLE_THREADED,
-        options,
-        factory.put()));
-
-    return factory;
-}
-
-inline auto
-CreateDXGISwapChain(
-    winrt::com_ptr<ID3D11Device> const& device,
-    const DXGI_SWAP_CHAIN_DESC1* desc)
-{
-    auto dxgiDevice = device.as<IDXGIDevice2>();
-    winrt::com_ptr<IDXGIAdapter> adapter;
-    winrt::check_hresult(dxgiDevice->GetParent(winrt::guid_of<IDXGIAdapter>(), adapter.put_void()));
-    winrt::com_ptr<IDXGIFactory2> factory;
-    winrt::check_hresult(adapter->GetParent(winrt::guid_of<IDXGIFactory2>(), factory.put_void()));
-
-    winrt::com_ptr<IDXGISwapChain1> swapchain;
-    winrt::check_hresult(factory->CreateSwapChainForComposition(
-        device.get(),
-        desc,
-        nullptr,
-        swapchain.put()));
-
-    return swapchain;
-}
-
-inline auto
-CreateDXGISwapChain(
-    winrt::com_ptr<ID3D11Device> const& device,
-    uint32_t width,
-    uint32_t height,
-    DXGI_FORMAT format,
-    uint32_t bufferCount)
-{
-    DXGI_SWAP_CHAIN_DESC1 desc = {};
-    desc.Width = width;
-    desc.Height = height;
-    desc.Format = format;
-    desc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
-    desc.SampleDesc.Count = 1;
-    desc.SampleDesc.Quality = 0;
-    desc.BufferCount = bufferCount;
-    desc.Scaling = DXGI_SCALING_STRETCH;
-    desc.SwapEffect = DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL;
-    desc.AlphaMode = DXGI_ALPHA_MODE_PREMULTIPLIED;
-
-    return CreateDXGISwapChain(device, &desc);
-}

+ 0 - 40
AvRecorder/test/capturer/wgc/direct3d11.interop.h

@@ -1,40 +0,0 @@
-#pragma once
-#include <winrt/windows.graphics.directx.direct3d11.h>
-
-extern "C"
-{
-    HRESULT __stdcall CreateDirect3D11DeviceFromDXGIDevice(::IDXGIDevice* dxgiDevice,
-        ::IInspectable** graphicsDevice);
-
-    HRESULT __stdcall CreateDirect3D11SurfaceFromDXGISurface(::IDXGISurface* dgxiSurface,
-        ::IInspectable** graphicsSurface);
-}
-
-struct __declspec(uuid("A9B3D012-3DF2-4EE3-B8D1-8695F457D3C1"))
-    IDirect3DDxgiInterfaceAccess : ::IUnknown
-{
-    virtual HRESULT __stdcall GetInterface(GUID const& id, void** object) = 0;
-};
-
-inline auto CreateDirect3DDevice(IDXGIDevice* dxgi_device)
-{
-    winrt::com_ptr<::IInspectable> d3d_device;
-    winrt::check_hresult(CreateDirect3D11DeviceFromDXGIDevice(dxgi_device, d3d_device.put()));
-    return d3d_device.as<winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice>();
-}
-
-inline auto CreateDirect3DSurface(IDXGISurface* dxgi_surface)
-{
-    winrt::com_ptr<::IInspectable> d3d_surface;
-    winrt::check_hresult(CreateDirect3D11SurfaceFromDXGISurface(dxgi_surface, d3d_surface.put()));
-    return d3d_surface.as<winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DSurface>();
-}
-
-template <typename T>
-auto GetDXGIInterfaceFromObject(winrt::Windows::Foundation::IInspectable const& object)
-{
-    auto access = object.as<IDirect3DDxgiInterfaceAccess>();
-    winrt::com_ptr<T> result;
-    winrt::check_hresult(access->GetInterface(winrt::guid_of<T>(), result.put_void()));
-    return result;
-}

+ 0 - 9
AvRecorder/test/capturer/wgc/pch.cpp

@@ -1,9 +0,0 @@
-
-// D3D
-#include <d3d11_4.h>
-#include <dxgi1_6.h>
-#include <d2d1_3.h>
-#include <wincodec.h>
-
-
-#include "pch.h"

+ 0 - 34
AvRecorder/test/capturer/wgc/pch.h

@@ -1,34 +0,0 @@
-#pragma once
-#include <Unknwn.h>
-#include <inspectable.h>
-
-// WinRT
-
-#include <winrt/Windows.Foundation.h>
-#include <winrt/Windows.Graphics.Capture.h>
-#include <winrt/Windows.Graphics.DirectX.Direct3d11.h>
-#include <winrt/Windows.Graphics.DirectX.h>
-#include <winrt/Windows.System.h>
-#include <winrt/Windows.UI.Composition.Desktop.h>
-#include <winrt/Windows.UI.Composition.h>
-#include <winrt/Windows.UI.Popups.h>
-#include <winrt/Windows.UI.h>
-
-#include <windows.ui.composition.interop.h>
-#include <DispatcherQueue.h>
-
-// STL
-#include <atomic>
-#include <memory>
-
-// D3D
-#include <d3d11_4.h>
-#include <dxgi1_6.h>
-#include <d2d1_3.h>
-#include <wincodec.h>
-
-// Helpers
-#include "composition.interop.h"
-#include "d3dHelpers.h"
-#include "direct3d11.interop.h"
-#include "capture.interop.h"

+ 0 - 32
AvRecorder/test/capturer/wgc/winrt.cpp

@@ -1,32 +0,0 @@
-#include "pch.h"
-
-#include <ShObjIdl.h>
-
-using namespace winrt;
-using namespace Windows::UI;
-using namespace Windows::UI::Composition;
-using namespace Windows::UI::Composition::Desktop;
-
-// Direct3D11CaptureFramePool requires a DispatcherQueue
-winrt::Windows::System::DispatcherQueueController CreateDispatcherQueueController()
-{
-    namespace abi = ABI::Windows::System;
-
-    DispatcherQueueOptions options {
-        sizeof(DispatcherQueueOptions),
-        DQTYPE_THREAD_CURRENT,
-        DQTAT_COM_STA};
-
-    Windows::System::DispatcherQueueController controller {nullptr};
-    check_hresult(CreateDispatcherQueueController(options, reinterpret_cast<abi::IDispatcherQueueController**>(put_abi(controller))));
-    return controller;
-}
-
-DesktopWindowTarget CreateDesktopWindowTarget(Compositor const& compositor, HWND window)
-{
-    namespace abi = ABI::Windows::UI::Composition::Desktop;
-    auto interop = compositor.as<abi::ICompositorDesktopInterop>();
-    DesktopWindowTarget target {nullptr};
-    check_hresult(interop->CreateDesktopWindowTarget(window, true, reinterpret_cast<abi::IDesktopWindowTarget**>(put_abi(target))));
-    return target;
-}

+ 0 - 24
AvRecorder/test/capturer/wgc/winrt.h

@@ -1,24 +0,0 @@
-#pragma once
-
-// WinRT
-#include <winrt/Windows.Foundation.h>
-#include <winrt/Windows.System.h>
-#include <winrt/Windows.UI.h>
-#include <winrt/Windows.UI.Composition.h>
-#include <winrt/Windows.UI.Composition.Desktop.h>
-#include <winrt/Windows.UI.Popups.h>
-#include <winrt/Windows.Graphics.Capture.h>
-#include <winrt/Windows.Graphics.DirectX.h>
-#include <winrt/Windows.Graphics.DirectX.Direct3d11.h>
-
-#include <windows.ui.composition.interop.h>
-#include <DispatcherQueue.h>
-
-using namespace winrt;
-using namespace Windows::UI;
-using namespace Windows::UI::Composition;
-using namespace Windows::UI::Composition::Desktop;
-
-// Direct3D11CaptureFramePool requires a DispatcherQueue
-winrt::Windows::System::DispatcherQueueController CreateDispatcherQueueController();
-DesktopWindowTarget CreateDesktopWindowTarget(Compositor const& compositor, HWND window);

+ 0 - 96
AvRecorder/test/capturer/wgc_capturer.cpp

@@ -1,96 +0,0 @@
-#include "wgc_capturer.h"
-
-#include "wgc/winrt.h"
-
-#include <QWidget>
-
-winrt::Windows::System::DispatcherQueue* WgcCapturer::queuePtr = nullptr;
-winrt::Windows::UI::Composition::ContainerVisual* WgcCapturer::rootPtr = nullptr;
-std::list<WgcCapturer*> WgcCapturer::_capturers;
-QWidget* __widget = nullptr;
-
-void WgcCapturer::Init()
-{
-    if (queuePtr != nullptr) {
-        return;
-    }
-    // Init COM
-    init_apartment(apartment_type::single_threaded);
-    // Create a DispatcherQueue for our thread
-    static auto controller = CreateDispatcherQueueController();
-    // Initialize Composition
-    static auto compositor = Compositor();
-    __widget = new QWidget;
-    // __widget->resize(800, 600);
-    // __widget->show();
-    static auto target = CreateDesktopWindowTarget(compositor, (HWND)__widget->winId());
-    static auto root = compositor.CreateContainerVisual();
-    root.RelativeSizeAdjustment({1.0f, 1.0f});
-    target.Root(root);
-
-    // Enqueue our capture work on the dispatcher
-    static auto queue = controller.DispatcherQueue();
-    queuePtr = &queue;
-    rootPtr = &root;
-    // 首先 New 一个 Capturer 备用
-    New();
-}
-
-void WgcCapturer::Uninit()
-{
-    delete __widget;
-    while (!_capturers.empty()) {
-        delete *_capturers.begin();
-        _capturers.erase(_capturers.begin());
-    }
-}
-
-WgcCapturer* WgcCapturer::New()
-{
-    // 将上一个 new 好的对象返回,并重新预备一个新的
-    if (_capturers.empty()) {
-        _capturers.push_back(new WgcCapturer);
-    }
-    return *(--_capturers.end());
-}
-
-void WgcCapturer::Delete(WgcCapturer* ptr)
-{
-    // auto iter = std::find(_capturers.begin(), _capturers.end(), ptr);
-    // if (iter == _capturers.end()) {
-    //     return;
-    // }
-    // if (*iter != nullptr) {
-    //     delete *iter;
-    // }
-    // _capturers.erase(iter);
-}
-
-WgcCapturer::WgcCapturer()
-{
-    _app = new App;
-    _isAppInit = false;
-    auto success = queuePtr->TryEnqueue([=]() -> void {
-        _app->Initialize(*rootPtr);
-        _isAppInit = true;
-    });
-    WINRT_VERIFY(success);
-}
-
-WgcCapturer::~WgcCapturer()
-{
-    if (_app) {
-        delete _app;
-        _app = nullptr;
-    }
-}
-
-bool WgcCapturer::StartCapturerMonitor(HMONITOR monitor, int width, int height)
-{
-    return _app->StartCaptureMonitor(monitor, width, height);
-}
-
-bool WgcCapturer::StartCapturerWindow(HWND hwnd, int width, int height)
-{
-    return _app->StartCaptureWindow(hwnd, width, height);
-}

+ 0 - 35
AvRecorder/test/capturer/wgc_capturer.h

@@ -1,35 +0,0 @@
-#ifndef __WGC_CAPTURER_H__
-#define __WGC_CAPTURER_H__
-
-#include "wgc/pch.h"
-
-#include "wgc/App.h"
-#include <list>
-
-class WgcCapturer {
-public:
-    bool StartCapturerWindow(HWND hwnd, int width, int height);
-    bool StartCapturerMonitor(HMONITOR monitor, int width, int height);
-    void SetDrawCursor(bool isDrawCursor) { _app->SetDrawCursor(isDrawCursor); }
-    static void Init();
-    static WgcCapturer* New();
-    static void Delete(WgcCapturer* ptr);
-    static void Uninit();
-    void Close()
-    {
-        if (_app != nullptr) {
-            _app->Close();
-        }
-    }
-    AVFrame* GetFrame() { return _app->GetFrame(); }
-
-private:
-    WgcCapturer();
-    ~WgcCapturer();
-    App* _app = nullptr;
-    bool _isAppInit = false;
-    static std::list<WgcCapturer*> _capturers;
-    static winrt::Windows::System::DispatcherQueue* queuePtr;
-    static winrt::Windows::UI::Composition::ContainerVisual* rootPtr;
-};
-#endif

+ 0 - 60
AvRecorder/test/d3d/buffer_filler.cpp

@@ -1,60 +0,0 @@
-#include "buffer_filler.h"
-#include "basic/basic.h"
-
-bool BufferFiller::Fill(ID3D11Device* device, D3D11_TEXTURE2D_DESC desc, int maxCnt)
-{
-    // 设置通用的纹理属性
-    desc.ArraySize = 1;
-    desc.BindFlags = 0;
-    desc.MiscFlags = 0;
-    desc.SampleDesc.Count = 1;
-    desc.SampleDesc.Quality = 0;
-    desc.MipLevels = 1;
-    desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
-    desc.Usage = D3D11_USAGE_STAGING;
-
-    // 如果已达到最大缓冲区数量,则替换现有缓冲区
-    if (_buffers.size() == maxCnt) {
-        ID3D11Texture2D* dstImg = nullptr;
-        if (FAILED(device->CreateTexture2D(&desc, nullptr, &dstImg))) {
-            return false;
-        }
-        // 释放旧的缓冲区并替换
-        if (_buffers[_mapIdx]) {
-            _buffers[_mapIdx]->Release();
-        }
-        _buffers[_mapIdx] = dstImg;
-        _mapIdx = (_mapIdx + 1) % _buffers.size();
-        return true;
-    }
-
-    // 否则创建新的缓冲区直到达到最大数量
-    while (_buffers.size() < maxCnt) {
-        ID3D11Texture2D* dstImg = nullptr;
-        if (FAILED(device->CreateTexture2D(&desc, nullptr, &dstImg))) {
-            break;
-        }
-        _buffers.push_back(dstImg);
-    }
-
-    __CheckBool(!_buffers.empty());
-    _copyIdx = 0;
-    _mapIdx = (_copyIdx + 1) % _buffers.size();
-    return true;
-}
-
-bool BufferFiller::Reset()
-{
-    _buffers[_mapIdx]->Release();
-    _buffers[_mapIdx] = nullptr;
-    _copyIdx = (_copyIdx + 1) % _buffers.size();
-    return true;
-}
-
-void BufferFiller::Clear()
-{
-    for (auto&& dstImg : _buffers) {
-        Free(dstImg, [&dstImg] { dstImg->Release(); });
-    }
-    _buffers.clear();
-}

+ 0 - 24
AvRecorder/test/d3d/buffer_filler.h

@@ -1,24 +0,0 @@
-#ifndef __BUFFER_FILLER_H__
-#define __BUFFER_FILLER_H__
-#include <d3d11.h>
-#include <vector>
-
-class BufferFiller {
-public:
-    bool Fill(ID3D11Device* device, D3D11_TEXTURE2D_DESC desc, int maxCnt = 3);
-    bool Reset();
-    ID3D11Texture2D* GetCopy() { return _buffers[_copyIdx]; }
-    ID3D11Texture2D* GetMap() { return _buffers[_mapIdx]; }
-    void Clear();
-    ~BufferFiller()
-    {
-        Clear();
-    }
-
-private:
-    int _mapIdx = 0;
-    int _copyIdx = 0;
-    std::vector<ID3D11Texture2D*> _buffers;
-};
-
-#endif

+ 0 - 131
AvRecorder/test/d3d/convert.cpp

@@ -1,131 +0,0 @@
-
-#include "convert.h"
-using namespace std;
-
-#if !defined(SAFE_RELEASE)
-#define SAFE_RELEASE(X) \
-    if (X) {            \
-        X->Release();   \
-        X = nullptr;    \
-    }
-#endif
-
-#if !defined(PRINTERR1)
-#define PRINTERR1(x) printf(__FUNCTION__ ": Error 0x%08x at line %d in file %s\n", x, __LINE__, __FILE__);
-#endif
-
-#if !defined(PRINTERR)
-#define PRINTERR(x, y) printf(__FUNCTION__ ": Error 0x%08x in %s at line %d in file %s\n", x, y, __LINE__, __FILE__);
-#endif
-
-/// Initialize Video Context
-HRESULT D3dConverter::Open(ID3D11Device* pDev, ID3D11DeviceContext* pCtx,
-    const D3D11_VIDEO_PROCESSOR_COLOR_SPACE& inColorSpace, D3D11_VIDEO_PROCESSOR_COLOR_SPACE& outColorSpace)
-{
-    m_pDev = pDev;
-    m_pCtx = pCtx;
-    m_pDev->AddRef();
-    m_pCtx->AddRef();
-    /// Obtain Video device and Video device context
-    HRESULT hr = m_pDev->QueryInterface(__uuidof(ID3D11VideoDevice), (void**)&m_pVid);
-    if (FAILED(hr)) {
-        PRINTERR(hr, "QAI for ID3D11VideoDevice");
-    }
-    hr = m_pCtx->QueryInterface(__uuidof(ID3D11VideoContext), (void**)&m_pVidCtx);
-    if (FAILED(hr)) {
-        PRINTERR(hr, "QAI for ID3D11VideoContext");
-    }
-    _inColorSpace = inColorSpace;
-    _outColorSpace = outColorSpace;
-    return hr;
-}
-
-/// Release all Resources
-void D3dConverter::Close()
-{
-    for (auto& it : viewMap) {
-        ID3D11VideoProcessorOutputView* pVPOV = it.second;
-        pVPOV->Release();
-    }
-    SAFE_RELEASE(m_pVP);
-    SAFE_RELEASE(m_pVPEnum);
-    SAFE_RELEASE(m_pVidCtx);
-    SAFE_RELEASE(m_pVid);
-    SAFE_RELEASE(m_pCtx);
-    SAFE_RELEASE(m_pDev);
-}
-
-/// Perform Colorspace conversion
-HRESULT D3dConverter::Convert(ID3D11Texture2D* pIn, ID3D11Texture2D* pOut)
-{
-    HRESULT hr = S_OK;
-
-    D3D11_TEXTURE2D_DESC inDesc = {0};
-    D3D11_TEXTURE2D_DESC outDesc = {0};
-    pIn->GetDesc(&inDesc);
-    pOut->GetDesc(&outDesc);
-
-    /// Check if VideoProcessor needs to be reconfigured
-    /// Reconfiguration is required if input/output dimensions have changed
-    if (m_pVP) {
-        if (m_inDesc.Width != inDesc.Width || m_inDesc.Height != inDesc.Height || m_outDesc.Width != outDesc.Width || m_outDesc.Height != outDesc.Height) {
-            SAFE_RELEASE(m_pVPEnum);
-            SAFE_RELEASE(m_pVP);
-        }
-    }
-
-    if (!m_pVP) {
-        /// Initialize Video Processor
-        m_inDesc = inDesc;
-        m_outDesc = outDesc;
-        D3D11_VIDEO_PROCESSOR_CONTENT_DESC contentDesc = {
-            D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE,
-            {0, 0}, inDesc.Width, inDesc.Height,
-            {0, 0}, outDesc.Width, outDesc.Height,
-            D3D11_VIDEO_USAGE_PLAYBACK_NORMAL};
-        hr = m_pVid->CreateVideoProcessorEnumerator(&contentDesc, &m_pVPEnum);
-        if (FAILED(hr)) {
-            PRINTERR(hr, "CreateVideoProcessorEnumerator");
-        }
-        hr = m_pVid->CreateVideoProcessor(m_pVPEnum, 0, &m_pVP);
-        if (FAILED(hr)) {
-            PRINTERR(hr, "CreateVideoProcessor");
-        }
-
-        m_pVidCtx->VideoProcessorSetStreamColorSpace(m_pVP, 0, &_inColorSpace);
-        m_pVidCtx->VideoProcessorSetOutputColorSpace(m_pVP, &_outColorSpace);
-    }
-
-    /// Obtain Video Processor Input view from input texture
-    ID3D11VideoProcessorInputView* pVPIn = nullptr;
-    D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC inputVD = {0, D3D11_VPIV_DIMENSION_TEXTURE2D, {0, 0}};
-    hr = m_pVid->CreateVideoProcessorInputView(pIn, m_pVPEnum, &inputVD, &pVPIn);
-    if (FAILED(hr)) {
-        PRINTERR(hr, "CreateVideoProcessInputView");
-        return hr;
-    }
-
-    /// Obtain Video Processor Output view from output texture
-    ID3D11VideoProcessorOutputView* pVPOV = nullptr;
-    D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC ovD = {D3D11_VPOV_DIMENSION_TEXTURE2D};
-    hr = m_pVid->CreateVideoProcessorOutputView(pOut, m_pVPEnum, &ovD, &pVPOV);
-    if (FAILED(hr)) {
-        SAFE_RELEASE(pVPIn);
-        PRINTERR(hr, "CreateVideoProcessorOutputView");
-        return hr;
-    }
-
-    /// Create a Video Processor Stream to run the operation
-    D3D11_VIDEO_PROCESSOR_STREAM stream = {TRUE, 0, 0, 0, 0, nullptr, pVPIn, nullptr};
-
-    /// Perform the Colorspace conversion
-    hr = m_pVidCtx->VideoProcessorBlt(m_pVP, pVPOV, 0, 1, &stream);
-    if (FAILED(hr)) {
-        SAFE_RELEASE(pVPIn);
-        PRINTERR(hr, "VideoProcessorBlt");
-        return hr;
-    }
-    SAFE_RELEASE(pVPIn);
-    SAFE_RELEASE(pVPOV);
-    return hr;
-}

+ 0 - 80
AvRecorder/test/d3d/convert.h

@@ -1,80 +0,0 @@
-/*
- * Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *  * Redistributions of source code must retain the above copyright
- *    notice, this list of conditions and the following disclaimer.
- *  * Redistributions in binary form must reproduce the above copyright
- *    notice, this list of conditions and the following disclaimer in the
- *    documentation and/or other materials provided with the distribution.
- *  * Neither the name of NVIDIA CORPORATION nor the names of its
- *    contributors may be used to endorse or promote products derived
- *    from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#pragma once
-#include <dxgi1_2.h>
-#include <d3d11_2.h>
-#include <unordered_map>
-
-class D3dConverter {
-    /// Simple Preprocessor class
-    /// Uses DXVAHD VideoProcessBlt to perform colorspace conversion
-private:
-    /// D3D11 device to be used for Processing
-    ID3D11Device* m_pDev = nullptr;
-    /// D3D11 device context to be used for Processing
-    ID3D11DeviceContext* m_pCtx = nullptr;
-    /// D3D11 video device to be used for Processing, obtained from d3d11 device
-    ID3D11VideoDevice* m_pVid = nullptr;
-    /// D3D11 video device context to be used for Processing, obtained from d3d11 device
-    ID3D11VideoContext* m_pVidCtx = nullptr;
-    /// DXVAHD video processor configured for processing.
-    /// Needs to be reconfigured based on input and output textures for each Convert() call
-    ID3D11VideoProcessor* m_pVP = nullptr;
-    /// DXVAHD VpBlt output target. Obtained from the output texture passed to Convert()
-    ID3D11VideoProcessorOutputView* m_pVPOut = nullptr;
-    /// D3D11 video processor enumerator. Required to configure Video processor streams
-    ID3D11VideoProcessorEnumerator* m_pVPEnum = nullptr;
-    /// Mapping of Texture2D handle and corresponding Video Processor output view handle
-    /// Optimization to avoid having to create video processor output views in each Convert() call
-    std::unordered_map<ID3D11Texture2D*, ID3D11VideoProcessorOutputView*> viewMap;
-    /// Input and Output Texture2D properties.
-    /// Required to optimize Video Processor stream usage
-    D3D11_TEXTURE2D_DESC m_inDesc = {0};
-    D3D11_TEXTURE2D_DESC m_outDesc = {0};
-    D3D11_VIDEO_PROCESSOR_COLOR_SPACE _inColorSpace;
-    D3D11_VIDEO_PROCESSOR_COLOR_SPACE _outColorSpace;
-
-public:
-    /// Initialize Video Context
-    HRESULT Open(ID3D11Device* pDev, ID3D11DeviceContext* pCtx,
-        const D3D11_VIDEO_PROCESSOR_COLOR_SPACE& inColorSpace, D3D11_VIDEO_PROCESSOR_COLOR_SPACE& outColorSpace);
-    /// Perform Colorspace conversion
-    HRESULT Convert(ID3D11Texture2D* pIn, ID3D11Texture2D* pOut);
-    /// Release all resources
-    void Close();
-
-public:
-    /// Constructor
-    // RGBToNV12(ID3D11Device *pDev, ID3D11DeviceContext *pCtx);
-    /// Destructor. Release all resources before destroying object
-    ~D3dConverter()
-    {
-        Close();
-    }
-};

+ 0 - 81
AvRecorder/test/d3d/gen_frame.cpp

@@ -1,81 +0,0 @@
-#include "gen_frame.h"
-
-#include <winrt/base.h>
-
-#undef min
-#undef max
-
-bool GenNv12Frame(ID3D11Device* device,
-                  ID3D11DeviceContext* ctx,
-                  const D3D11_TEXTURE2D_DESC& desc,
-                  ID3D11Texture2D* img,
-                  BufferFiller& buffers,
-                  AVFrame*& outFrame,
-                  D3dConverter& rgbToNv12)
-{
-    winrt::com_ptr<ID3D11Texture2D> nv12Img = nullptr;
-    if (FAILED(device->CreateTexture2D(&desc, nullptr, nv12Img.put()))) {
-        return false;
-    }
-    __CheckBool(SUCCEEDED(rgbToNv12.Convert(img, nv12Img.get())));
-    // 填充缓冲区
-    __CheckBool(buffers.Fill(device, desc));
-
-    ctx->CopyResource(buffers.GetCopy(), nv12Img.get());
-    D3D11_MAPPED_SUBRESOURCE resource;
-    __CheckBool(SUCCEEDED(ctx->Map(buffers.GetMap(), 0, D3D11_MAP_READ, 0, &resource)));
-    auto height = std::min(outFrame->height, (int)desc.Height);
-    auto width = outFrame->width;
-    auto srcLinesize = resource.RowPitch;
-    auto dstLinesize = outFrame->linesize[0];
-    auto srcData = (uint8_t*)resource.pData;
-    auto titleHeight = std::max(int(desc.Height - height), 0);
-    /* auto copyLine = std::min(std::min(width, (int) srcLinesize), dstLinesize);*/
-    auto border = (desc.Width - width) / 2;
-    __mtx.lock();
-
-    // Y
-    int Ystart = (titleHeight - border) * srcLinesize + border;
-    auto dstData = outFrame->data[0];
-    for (int row = 0; row < height; ++row) {
-        memcpy(dstData + row * dstLinesize, srcData + Ystart + row * srcLinesize, width);
-    }
-
-    // UV
-    dstData = outFrame->data[1];
-    int UVStart = srcLinesize * desc.Height + (titleHeight - border) / 2 * srcLinesize + border / 2 * 2;
-    for (int row = 0; row < height / 2; ++row) {
-        memcpy(dstData + row * dstLinesize, srcData + UVStart + row * srcLinesize, width);
-    }
-
-    __mtx.unlock();
-    ctx->Unmap(buffers.GetMap(), 0);
-    __CheckBool(buffers.Reset());
-    return true;
-}
-bool GenRgbFrame(ID3D11Device* device, ID3D11DeviceContext* ctx, const D3D11_TEXTURE2D_DESC& desc,
-    ID3D11Texture2D* img, BufferFiller& buffers, AVFrame*& outFrame)
-{
-    __CheckBool(buffers.Fill(device, desc));
-    ctx->CopyResource(buffers.GetCopy(), img);
-    D3D11_MAPPED_SUBRESOURCE resource;
-    __CheckBool(SUCCEEDED(ctx->Map(buffers.GetMap(), 0, D3D11_MAP_READ, 0, &resource)));
-    auto height = std::min(outFrame->height, (int)desc.Height);
-    auto width = outFrame->width;
-    auto srcLinesize = resource.RowPitch;
-    auto dstLinesize = outFrame->linesize[0];
-    auto srcData = (uint8_t*)resource.pData;
-    auto dstData = outFrame->data[0];
-    auto titleHeight = std::max(int(desc.Height - height), 0);
-    auto copyLine = std::min(std::min(width * 4, (int)srcLinesize), dstLinesize);
-    auto border = (desc.Width - width) / 2;
-    __mtx.lock();
-    for (int row = 0; row < height; ++row) {
-        auto offset = (titleHeight + row - border) * srcLinesize + border * 4;
-        memcpy(dstData + row * dstLinesize, srcData + offset, copyLine);
-    }
-    __mtx.unlock();
-    ctx->Unmap(buffers.GetMap(), 0);
-    __CheckBool(buffers.Reset());
-    return true;
-}

+ 0 - 13
AvRecorder/test/d3d/gen_frame.h

@@ -1,13 +0,0 @@
-#ifndef __GEN_FRAME_H__
-#define __GEN_FRAME_H__
-
-#include <d3d11.h>
-#include "buffer_filler.h"
-#include "basic/frame.h"
-#include "convert.h"
-
-bool GenNv12Frame(ID3D11Device* device, ID3D11DeviceContext* ctx, const D3D11_TEXTURE2D_DESC& desc,
-    ID3D11Texture2D* img, BufferFiller& buffers, AVFrame*& outFrame, D3dConverter& rgbToNv12);
-bool GenRgbFrame(ID3D11Device* device, ID3D11DeviceContext* ctx, const D3D11_TEXTURE2D_DESC& desc,
-    ID3D11Texture2D* img, BufferFiller& buffers, AVFrame*& outFrame);
-#endif

+ 0 - 14
AvRecorder/test/encoder/abstract_encoder.cpp

@@ -1,14 +0,0 @@
-
-#include "abstract_encoder.h"
-
-AVPacket* AbstractEncoder::Encode()
-{
-    int ret = avcodec_receive_packet(_codecCtx, _packet);
-    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
-        return nullptr;
-    } else if (ret < 0) {
-        __DebugPrint("avcodec_receive_packet : Error during encoding");
-        return nullptr;
-    }
-    return _packet;
-}

+ 0 - 31
AvRecorder/test/encoder/abstract_encoder.h

@@ -1,31 +0,0 @@
-#ifndef __BASIC_ENCODER_H__
-#define __BASIC_ENCODER_H__
-
-#include "basic/basic.h"
-
-class AbstractEncoder
-{
-public:
-    AbstractEncoder() { _packet = av_packet_alloc(); }
-    AVCodecContext* GetCtx() const { return _codecCtx; }
-
-    virtual bool PushFrame(AVFrame* frame, bool isEnd, uint64_t pts) = 0;
-    AVPacket* Encode();
-    virtual void AfterEncode() {};
-    virtual void Close() = 0;
-    virtual ~AbstractEncoder()
-    {
-        Free(_packet, [this] { av_packet_free(&_packet); });
-    }
-
-protected:
-    bool _isOpen = false;
-    AVPacket* _packet = nullptr;
-    const AVCodec* _codec = nullptr;
-    AVCodecContext* _codecCtx = nullptr;
-};
-
-template<MediaType mediaType>
-class Encoder;
-
-#endif

+ 0 - 52
AvRecorder/test/encoder/audio_encoder.cpp

@@ -1,52 +0,0 @@
-
-#include "audio_encoder.h"
-
-bool Encoder<MediaType::AUDIO>::Open(const Param& audioParma, AVFormatContext* fmtCtx)
-{
-    Close();
-    _isOpen = false;
-    __CheckBool(_Init(audioParma, fmtCtx));
-    __CheckBool(avcodec_open2(_codecCtx, _codec, nullptr) >= 0);
-    _isOpen = true;
-    return true;
-}
-void Encoder<MediaType::AUDIO>::Close()
-{
-    if (_codecCtx != nullptr) {
-        avcodec_free_context(&_codecCtx);
-    }
-    Free(_codecCtx, [this] { avcodec_free_context(&_codecCtx); });
-}
-bool Encoder<MediaType::AUDIO>::_Init(const Param& audioParam, AVFormatContext* fmtCtx)
-{
-    // codec
-    __CheckBool(_codec = avcodec_find_encoder(AV_CODEC_ID_AAC));
-    // codeccontext
-    __CheckBool(_codecCtx = avcodec_alloc_context3(_codec));
-    _codecCtx->sample_fmt = AV_SAMPLE_FMT_FLTP;
-    _codecCtx->bit_rate = audioParam.bitRate;
-    _codecCtx->sample_rate = AUDIO_SAMPLE_RATE;
-    AVChannelLayout layout;
-    layout.order = AV_CHANNEL_ORDER_NATIVE;
-    layout.nb_channels = 1;
-    layout.u.mask = AV_CH_LAYOUT_MONO;
-    av_channel_layout_copy(&_codecCtx->ch_layout, &layout);
-    if (fmtCtx->oformat->flags & AVFMT_GLOBALHEADER) {
-        _codecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
-    }
-    return true;
-}
-
-bool Encoder<MediaType::AUDIO>::PushFrame(AVFrame* frame, bool isEnd, uint64_t pts)
-{
-    if (!isEnd) {
-        __CheckBool(frame);
-    } else {
-        frame = nullptr;
-    }
-    if (frame != nullptr) {
-        frame->pts = pts;
-    }
-    __CheckBool(avcodec_send_frame(_codecCtx, frame) >= 0);
-    return true;
-}

+ 0 - 24
AvRecorder/test/encoder/audio_encoder.h

@@ -1,24 +0,0 @@
-#ifndef __AUDIO_ENCODER_H__
-#define __AUDIO_ENCODER_H__
-
-#include "abstract_encoder.h"
-
-template<>
-class Encoder<MediaType::AUDIO> : public AbstractEncoder
-{
-public:
-    struct Param
-    {
-        int bitRate;
-    };
-    ~Encoder() { Close(); }
-
-    bool Open(const Param& audioParma, AVFormatContext* fmtCtx);
-    virtual void Close() override;
-    virtual bool PushFrame(AVFrame* frame, bool isEnd, uint64_t pts) override;
-
-private:
-    bool _Init(const Param& audioParam, AVFormatContext* fmtCtx);
-};
-
-#endif

+ 0 - 329
AvRecorder/test/encoder/audio_mixer.cpp

@@ -1,329 +0,0 @@
-#include "audio_mixer.h"
-
-#include "basic/basic.h"
-#include "basic/frame.h"
-
-#include <windows.h>
-
-AVSampleFormat BitsToFmt(int bits)
-{
-    switch (bits) {
-    case 8:
-        return AV_SAMPLE_FMT_U8;
-    case 16:
-        return AV_SAMPLE_FMT_S16;
-    case 32:
-        return AV_SAMPLE_FMT_S32;
-    case 64:
-        return AV_SAMPLE_FMT_S64;
-    default:
-        return AV_SAMPLE_FMT_FLT;
-    }
-}
-
-int FmtToBits(AVSampleFormat fmt)
-{
-    switch (fmt) {
-    case AV_SAMPLE_FMT_U8:
-        return 8;
-    case AV_SAMPLE_FMT_S16:
-        return 16;
-    case AV_SAMPLE_FMT_S32:
-        return 32;
-    case AV_SAMPLE_FMT_S64:
-        return 64;
-    default:
-        return 32;
-    }
-}
-
-int SizeToNbSamples(int size, int bitsPerSample, int nbChannels)
-{
-    return (size << 3) / bitsPerSample / nbChannels;
-}
-
-int NbSamplesToSize(int nbSamples, int bitsPerSample, int nbChannels)
-{
-    return (nbSamples * bitsPerSample * nbChannels) >> 3;
-}
-
-bool FrameQueue::Init(int channelNums, int sampleRate, AVSampleFormat fmt, int nbSamples)
-{
-    _front = 0;
-    _sampleRate = sampleRate;
-    _fmt = fmt;
-    _nbSamples = nbSamples;
-    _usedLinesize = nbSamples * channelNums * (fmt == AV_SAMPLE_FMT_S16 ? 2 : 4);
-    av_channel_layout_default(&_layout, channelNums);
-    _queue.emplace(fmt, &_layout, sampleRate, nbSamples);
-    return true;
-}
-
-Frame<MediaType::AUDIO> FrameQueue::Pop()
-{
-    if (_queue.size() > 1) {
-        auto frame = std::move(_queue.front());
-        _queue.pop();
-        return frame;
-    }
-    return Frame<MediaType::AUDIO>();
-}
-
-void FrameQueue::Push(uint8_t* data, int length)
-{
-    if (length > _usedLinesize) { // 递归调用
-        Push(data, length / 2);
-        Push(data + length / 2, length / 2 + length % 2);
-        return;
-    }
-    auto&& frame = _queue.back().frame;
-    int secondLength = _front + length - _usedLinesize;
-    if (secondLength <= 0) { // 第一段缓存是够用的
-        memcpy(frame->data[0] + _front, data, length);
-        _front += length;
-        return;
-    }
-    // 第一段缓存不够用
-    int firstLength = length - secondLength;
-    if (firstLength > 0) {
-        memcpy(frame->data[0] + _front, data, firstLength);
-    }
-    // 载入一段新缓存
-    _queue.emplace(_fmt, &_layout, _sampleRate, _nbSamples);
-    memcpy(_queue.back().frame->data[0], data + firstLength, secondLength);
-    _front = secondLength;
-}
-
-bool Resampler::Open(int inChannelNums,
-                     int inSampleRate,
-                     AVSampleFormat inFmt,
-                     int outChannelNums,
-                     int outSampleRate,
-                     AVSampleFormat outFmt,
-                     int outNbSample)
-{
-    Close();
-    __CheckBool(_swrCtx = swr_alloc());
-
-    AVChannelLayout tmpLayout;
-    av_channel_layout_default(&tmpLayout, inChannelNums);
-    av_opt_set_chlayout(_swrCtx, "in_chlayout", &tmpLayout, 0);
-    av_opt_set_int(_swrCtx, "in_sample_rate", inSampleRate, 0);
-    av_opt_set_sample_fmt(_swrCtx, "in_sample_fmt", inFmt, 0);
-    __CheckBool(_fromQueue.Init(inChannelNums, inSampleRate, inFmt, inSampleRate / 100 * 2));
-
-    av_channel_layout_default(&tmpLayout, outChannelNums);
-    av_opt_set_chlayout(_swrCtx, "out_chlayout", &tmpLayout, 0);
-    av_opt_set_int(_swrCtx, "out_sample_rate", outSampleRate, 0);
-    av_opt_set_sample_fmt(_swrCtx, "out_sample_fmt", outFmt, 0);
-    if (swr_init(_swrCtx) < 0) {
-        Close();
-        __DebugPrint("swr_init(_swrCtx) failed\n");
-        return false;
-    }
-    __CheckBool(_toQueue.Init(outChannelNums, outSampleRate, outFmt, outNbSample));
-    __CheckBool(_swrFrame = Frame<MediaType::AUDIO>::Alloc(outFmt,
-                                                           &tmpLayout,
-                                                           outSampleRate,
-                                                           outSampleRate / 100 * 2));
-
-    return true;
-}
-
-void Resampler::Close()
-{
-    Free(_swrCtx, [this] { swr_free(&_swrCtx); });
-    Free(_swrFrame, [this] { av_frame_free(&_swrFrame); });
-}
-
-bool Resampler::Convert(uint8_t* data, int size)
-{
-    std::vector<Frame<MediaType::AUDIO>> ret;
-    if (data == nullptr) {
-        return false;
-    }
-    _fromQueue.Push(data, size);
-    for (; true;) { // 转换
-        auto frame = _fromQueue.Pop();
-        if (frame.frame == nullptr) {
-            break;
-        }
-        __CheckNullptr(swr_convert(_swrCtx,
-                                   _swrFrame->data,
-                                   _swrFrame->nb_samples,
-                                   (const uint8_t**) frame.frame->data,
-                                   frame.frame->nb_samples));
-        _toQueue.Push(_swrFrame->data[0], _swrFrame->linesize[0]);
-    }
-    return true;
-}
-
-AVFrame* AudioMixer::Convert(uint32_t index, uint8_t* inBuf, uint32_t size)
-{
-    std::lock_guard<std::mutex> locker(_mutex);
-    auto iter = _audioInputInfos.find(index);
-    __CheckNullptr(iter != _audioInputInfos.end());
-    __CheckNullptr(iter->second.resampler->Convert(inBuf, size));
-    return _AdjustVolume() ? _outputFrame : nullptr;
-}
-
-bool AudioMixer::_AdjustVolume()
-{
-    // 检测所有流之间是不是相差太大了以及缓存的数据是不是太多了
-    // 如果缓存的数据太多,直接将所有的队列删除同样的数据
-    // 如果两个流直接数据相差太大,将多的那个减到和少的那个一样
-    constexpr int MAX_DIFF = 10;
-    constexpr int MAX_BUF_SIZE = 20;
-    int minSize = INT_MAX;
-    int maxSize = INT_MIN;
-    FrameQueue* maxQueue = nullptr;
-#undef min
-    for (auto&& iter : _audioInputInfos) {
-        auto&& queue = iter.second.resampler->GetQueue();
-        if (queue.IsEmpty()) {
-            return false;
-        }
-        minSize = std::min(minSize, (int) queue.GetSize());
-        if (maxSize < (int) queue.GetSize()) {
-            maxSize = (int) queue.GetSize();
-            maxQueue = &queue;
-        }
-    }
-
-    if (maxSize - minSize > MAX_DIFF) {
-        __DebugPrint("Clear MAX_DIFF");
-        for (int i = 0; i < maxSize - minSize; ++i) {
-            maxQueue->Pop();
-        }
-    }
-
-    for (auto iter = _audioInputInfos.begin(); iter != _audioInputInfos.end(); ++iter) {
-        auto&& frameQueue = iter->second.resampler->GetQueue();
-        if (minSize > MAX_BUF_SIZE) {
-            __DebugPrint("Clear MAX_BUF_SIZE");
-            for (int i = 0; i < minSize - 2; ++i) {
-                frameQueue.Pop();
-            }
-        }
-        auto frame = frameQueue.Pop();
-        auto scale = iter->second.scale;
-        auto writeStream = (float*) (_outputFrame->data[0]);
-        auto readStream = (float*) (frame.frame->data[0]);
-        iter->second.volume = readStream[0] * scale;
-
-        if (iter == _audioInputInfos.begin()) {
-            if (std::abs(scale - 1)
-                < 0.01) { // 这种情况可以直接使用 memcpy 而不是下面那种低效率的逐个赋值
-                memcpy(writeStream, readStream, _outputFrame->linesize[0]);
-                continue;
-            }
-            // 要进行 scale, 只能逐个赋值
-            // 所以这里要清零
-            memset(writeStream, 0, _outputFrame->linesize[0]);
-        }
-        // 逐个计算赋值
-        for (int idx = 0; idx < _outputFrame->nb_samples; ++idx) {
-            writeStream[idx] += readStream[idx] * scale;
-            if (writeStream[idx] > 0.99) {
-                writeStream[idx] = 0.99f;
-            }
-        }
-    }
-    return true;
-}
-
-AudioMixer::AudioMixer()
-    : _inited(false)
-{}
-
-AudioMixer::~AudioMixer()
-{
-    // delete out_buf;
-    if (_inited) {
-        Close();
-    }
-}
-
-bool AudioMixer::AddAudioInput(uint32_t index,
-                               uint32_t sampleRate,
-                               uint32_t channels,
-                               uint32_t bitsPerSample,
-                               AVSampleFormat format)
-{
-    std::lock_guard<std::mutex> locker(_mutex);
-    __CheckBool(!_inited);
-    // 根据index保存是否已经存在
-    __CheckBool(_audioInputInfos.find(index) == _audioInputInfos.end());
-
-    auto& filterInfo = _audioInputInfos[index];
-    // 设置音频相关参数
-    filterInfo.sampleRate = sampleRate;
-    filterInfo.channels = channels;
-    filterInfo.bitsPerSample = bitsPerSample;
-    filterInfo.format = format;
-    filterInfo.name = std::string("input") + std::to_string(index);
-    return true;
-}
-
-bool AudioMixer::AddAudioOutput(const uint32_t sampleRate,
-                                const uint32_t channels,
-                                const uint32_t bitsPerSample,
-                                const AVSampleFormat format)
-{
-    std::lock_guard<std::mutex> locker(_mutex);
-    __CheckBool(!_inited);
-    // 设置音频相关参数
-    _audioOutputInfo.sampleRate = sampleRate;
-    _audioOutputInfo.channels = channels;
-    _audioOutputInfo.bitsPerSample = bitsPerSample;
-    _audioOutputInfo.format = format;
-    _audioOutputInfo.name = "output";
-    return true;
-}
-
-bool AudioMixer::SetOutFrameSize(int outFrameSize)
-{
-    if (_outFrameSize == outFrameSize) {
-        return true;
-    }
-    _outFrameSize = outFrameSize;
-    for (auto&& filterInfoPair : _audioInputInfos) {
-        auto&& filterInfo = filterInfoPair.second;
-        filterInfo.resampler = std::make_unique<Resampler>();
-        __CheckBool(filterInfo.resampler->Open(filterInfo.channels,
-                                               filterInfo.sampleRate,
-                                               filterInfo.format,
-                                               _audioOutputInfo.channels,
-                                               _audioOutputInfo.sampleRate,
-                                               _audioOutputInfo.format,
-                                               outFrameSize));
-    }
-    AVChannelLayout tmpLayout;
-    av_channel_layout_default(&tmpLayout, _audioOutputInfo.channels);
-    Free(_outputFrame, [this] { av_frame_free(&_outputFrame); });
-    __CheckBool(_outputFrame = Frame<MediaType::AUDIO>::Alloc(_audioOutputInfo.format,
-                                                              &tmpLayout,
-                                                              _audioOutputInfo.sampleRate,
-                                                              outFrameSize));
-    _inited = true;
-    return true;
-}
-
-bool AudioMixer::Close()
-{
-    if (!_inited) {
-        return true;
-    }
-    _inited = false;
-    std::lock_guard<std::mutex> locker(_mutex);
-    _audioInputInfos.clear();
-    Free(_outputFrame, [this] { av_frame_free(&_outputFrame); });
-    _outFrameSize = 0;
-    return true;
-}
-
-AudioMixer::AudioInfo* AudioMixer::GetInputInfo(uint32_t index)
-{
-    auto iter = _audioInputInfos.find(index);
-    return iter == _audioInputInfos.end() ? nullptr : &(iter->second);
-}

+ 0 - 100
AvRecorder/test/encoder/audio_mixer.h

@@ -1,100 +0,0 @@
-#ifndef __AUDIO_MIXER_H__
-#define __AUDIO_MIXER_H__
-
-#include <cstdint>
-#include <memory>
-#include <mutex>
-#include <queue>
-#include <string>
-#include <unordered_map>
-
-extern "C" {
-#include <libavcodec/avcodec.h>
-#include <libavfilter/buffersink.h>
-#include <libavfilter/buffersrc.h>
-#include <libavformat/avformat.h>
-#include <libavutil/opt.h>
-#include <libswresample/swresample.h>
-}
-
-#include "basic/frame.h"
-
-#define __PCM1_FRAME_SIZE (4096 * 2)
-#define __PCM2_FRAME_SIZE (4096)
-#define __PCM_OUT_FRAME_SIZE (40000)
-
-// 循环缓存空间
-class FrameQueue {
-public:
-    bool Init(int channelNums, int sampleRate, AVSampleFormat fmt, int nbSamples);
-    Frame<MediaType::AUDIO> Pop();
-    void Push(uint8_t* data, int length);
-    bool IsEmpty() const { return _queue.size() < 2; }
-    auto GetSize() const { return _queue.size(); }
-
-private:
-    int _front = 0;
-    AVChannelLayout _layout;
-    int _sampleRate;
-    int _nbSamples;
-    int _usedLinesize;
-    AVSampleFormat _fmt;
-    std::queue<Frame<MediaType::AUDIO>> _queue;
-};
-
-class Resampler {
-public:
-    bool Open(int inChannelNums, int inSampleRate, AVSampleFormat inFmt,
-        int outChannelNums, int outSampleRate, AVSampleFormat outFmt, int outNbSample);
-    bool Convert(uint8_t* data, int size);
-    void Close();
-    FrameQueue& GetQueue() { return _toQueue; }
-    ~Resampler() { Close(); }
-
-private:
-    AVFrame* _swrFrame = nullptr;
-    SwrContext* _swrCtx = nullptr;
-    FrameQueue _fromQueue;
-    FrameQueue _toQueue;
-};
-
-class AudioMixer {
-public:
-    struct AudioInfo {
-        uint32_t sampleRate;
-        uint32_t channels;
-        uint32_t bitsPerSample;
-        AVSampleFormat format;
-        std::string name;
-        std::unique_ptr<Resampler> resampler;
-        float volume = 0;
-        float scale = 1;
-        int callTime = 0;
-    };
-    AudioMixer();
-    virtual ~AudioMixer();
-    // 添加音频输入通道
-    bool AddAudioInput(uint32_t index, uint32_t sampleRate, uint32_t channels,
-        uint32_t bitsPerSample, AVSampleFormat format);
-    // 添加音频输出通道
-    bool AddAudioOutput(const uint32_t sampleRate, const uint32_t channels,
-        const uint32_t bitsPerSample, const AVSampleFormat format);
-    AVFrame* Convert(uint32_t index, uint8_t* inBuf, uint32_t size);
-    bool SetOutFrameSize(int outputFrameSize = 1024);
-    int GetOutFrameSize() const { return _outFrameSize; };
-    bool Close();
-    AudioInfo* GetInputInfo(uint32_t index);
-
-private:
-    bool _inited = false;
-    std::mutex _mutex;
-    // 输入
-    std::unordered_map<uint32_t, AudioInfo> _audioInputInfos;
-    // 转换格式
-    AudioInfo _audioOutputInfo;
-    AVFrame* _outputFrame = nullptr;
-    bool _AdjustVolume();
-    int _outFrameSize = 0;
-};
-
-#endif // AUDIOMIXER_H

+ 0 - 194
AvRecorder/test/encoder/video_encoder.cpp

@@ -1,194 +0,0 @@
-#include "video_encoder.h"
-
-extern "C" {
-#include <libavutil/opt.h>
-}
-
-std::vector<std::string> Encoder<MediaType::VIDEO>::_usableEncoders;
-
-Encoder<MediaType::VIDEO>::Encoder() {}
-
-bool Encoder<MediaType::VIDEO>::Open(const Param& encodeParam, AVFormatContext* fmtCtx)
-{
-    Close();
-    _isOpen = false;
-    __CheckBool(_Init(encodeParam, fmtCtx));
-
-    // 打开编码器
-    __CheckBool(avcodec_open2(_codecCtx, _codec, nullptr) >= 0);
-
-    _isOpen = true;
-    return true;
-}
-
-bool Encoder<MediaType::VIDEO>::PushFrame(AVFrame* frame, bool isEnd, uint64_t pts)
-{
-    if (!isEnd) {
-        __CheckBool(_Trans(frame));
-        frame = _bufferFrame;
-        __CheckBool(frame);
-    } else {
-        frame = nullptr; // 直接刷新编码器缓存
-    }
-    if (frame != nullptr) {
-        frame->pts = pts;
-    }
-    __CheckBool(avcodec_send_frame(_codecCtx, frame) >= 0);
-    return true;
-}
-
-void Encoder<MediaType::VIDEO>::AfterEncode()
-{
-    if (_isHardware) {
-        Free(_hwFrame, [this] { av_frame_free(&_hwFrame); });
-    }
-}
-
-void Encoder<MediaType::VIDEO>::Close()
-{
-    if (_codecCtx != nullptr) {
-        avcodec_free_context(&_codecCtx);
-    }
-
-    Free(_codecCtx, [this] { avcodec_free_context(&_codecCtx); });
-    Free(_hwDeviceCtx, [this] { av_buffer_unref(&_hwDeviceCtx); });
-    _converter = nullptr;
-}
-
-const std::vector<std::string>& Encoder<MediaType::VIDEO>::GetUsableEncoders()
-{
-    if (_usableEncoders.empty()) {
-        _FindUsableEncoders();
-    }
-    return _usableEncoders;
-}
-
-void Encoder<MediaType::VIDEO>::_FindUsableEncoders()
-{
-    // 尝试打开编码器看看编码器能不能用
-    Param param;
-    param.bitRate = 1000;
-    param.fps = 30;
-    param.width = 1920;
-    param.height = 1080;
-    Encoder encoder;
-    AVFormatContext* fmtCtx = nullptr;
-
-    __CheckNo(avformat_alloc_output_context2(&fmtCtx, nullptr, nullptr, "test.mp4") >= 0);
-    for (const auto& name : _encoderNames) {
-        if (strcmp(name, "libx264") == 0) { // 软件编码器必定支持
-            _usableEncoders.push_back(name);
-            continue;
-        }
-        param.name = name;
-        if (encoder.Open(param, fmtCtx)) {
-            _usableEncoders.push_back(name);
-        }
-        encoder.Close();
-    }
-    Free(fmtCtx, [&fmtCtx] { avformat_free_context(fmtCtx); });
-}
-
-bool Encoder<MediaType::VIDEO>::_Init(const Param& encodeParam, AVFormatContext* fmtCtx)
-{
-    _isHardware = encodeParam.name != "libx264";
-    AVHWDeviceType hwType;
-    if (encodeParam.name == "libx264") {
-        _pixFmt = AV_PIX_FMT_NV12;
-    } else if (encodeParam.name == "h264_nvenc") {
-        _pixFmt = AV_PIX_FMT_CUDA;
-        hwType = AV_HWDEVICE_TYPE_CUDA;
-    } else if (encodeParam.name == "h264_qsv") {
-        _pixFmt = AV_PIX_FMT_QSV;
-        hwType = AV_HWDEVICE_TYPE_QSV;
-    } else if (encodeParam.name == "h264_amf") {
-        _pixFmt = AV_PIX_FMT_VULKAN;
-        hwType = AV_HWDEVICE_TYPE_VULKAN;
-    }
-    _isHardware = _pixFmt != AV_PIX_FMT_NV12;
-    if (_isHardware
-        && av_hwdevice_ctx_create(&_hwDeviceCtx, hwType, nullptr, nullptr, 0) < 0) { // 硬件解码
-        __DebugPrint("av_hwdevice_ctx_create failed\n");
-        return false;
-    }
-    __CheckBool(_codec = avcodec_find_encoder_by_name(encodeParam.name.c_str()));
-    __CheckBool(_codecCtx = avcodec_alloc_context3(_codec));
-    _codecCtx->bit_rate = encodeParam.bitRate;
-    _codecCtx->width = encodeParam.width;
-    _codecCtx->height = encodeParam.height;
-    _codecCtx->time_base = {1, encodeParam.fps};
-    _codecCtx->framerate = {encodeParam.fps, 1};
-
-    // 影响缓冲区大小
-    _codecCtx->gop_size = 10;
-    _codecCtx->max_b_frames = 1;
-    _codecCtx->pix_fmt = _pixFmt;
-
-    /* Some formats want stream headers to be separate. */
-    if (fmtCtx->oformat->flags & AVFMT_GLOBALHEADER) {
-        _codecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
-    }
-
-    if (!_isHardware) { // 软件编码设置为快,避免占用过高的 CPU ,反正硬盘不值钱
-        av_opt_set(_codecCtx->priv_data, "preset", "veryfast", 0);
-    }
-
-    __CheckBool(!_isHardware || _SetHwFrameCtx());
-    return true;
-}
-bool Encoder<MediaType::VIDEO>::_SetHwFrameCtx()
-{
-    AVBufferRef* hwFramesRef;
-    AVHWFramesContext* framesCtx = nullptr;
-
-    __CheckBool(hwFramesRef = av_hwframe_ctx_alloc(_hwDeviceCtx));
-    framesCtx = (AVHWFramesContext*) (hwFramesRef->data);
-    framesCtx->format = _pixFmt;
-    framesCtx->sw_format = AV_PIX_FMT_NV12;
-    framesCtx->width = _codecCtx->width;
-    framesCtx->height = _codecCtx->height;
-    framesCtx->initial_pool_size = 20;
-    if (av_hwframe_ctx_init(hwFramesRef) < 0) {
-        __DebugPrint("av_hwframe_ctx_init failed\n");
-        av_buffer_unref(&hwFramesRef);
-        return false;
-    }
-    __CheckBool(_codecCtx->hw_frames_ctx = av_buffer_ref(hwFramesRef));
-    av_buffer_unref(&hwFramesRef);
-    return true;
-}
-
-bool Encoder<MediaType::VIDEO>::_Trans(AVFrame* frame)
-{
-    std::lock_guard<std::mutex> lk(__mtx);
-    if (!_isOpen) {
-        return false;
-    }
-    if (frame->format == AV_PIX_FMT_NV12) {
-        _bufferFrame = frame;
-    } else {
-        if (_converter == nullptr) {
-            _converter = std::make_unique<FfmpegConverter>(AVPixelFormat(frame->format),
-                                                           AV_PIX_FMT_NV12);
-            _converter->SetSize(frame->width, frame->height);
-        }
-        _bufferFrame = _converter->Trans(frame);
-    }
-    if (_isHardware) {
-        _bufferFrame = _ToHardware();
-    }
-    __CheckBool(_bufferFrame);
-    return true;
-}
-
-AVFrame* Encoder<MediaType::VIDEO>::_ToHardware()
-{
-    if (_bufferFrame == nullptr) {
-        return nullptr;
-    }
-    __CheckNullptr(_hwFrame = av_frame_alloc());
-    __CheckNullptr(av_hwframe_get_buffer(_codecCtx->hw_frames_ctx, _hwFrame, 0) >= 0);
-    __CheckNullptr(_hwFrame->hw_frames_ctx);
-    __CheckNullptr(av_hwframe_transfer_data(_hwFrame, _bufferFrame, 0) >= 0);
-    return _hwFrame;
-}

+ 0 - 47
AvRecorder/test/encoder/video_encoder.h

@@ -1,47 +0,0 @@
-#ifndef __VIDEO_ENCODER_H__
-#define __VIDEO_ENCODER_H__
-
-#include "abstract_encoder.h"
-#include "basic/frame.h"
-
-template <>
-class Encoder<MediaType::VIDEO> : public AbstractEncoder {
-public:
-    struct Param {
-        int bitRate;
-        int width;
-        int height;
-        int fps;
-        std::string name;
-    };
-    Encoder();
-    ~Encoder() { Close(); }
-    bool Open(const Param& encodeParam, AVFormatContext* fmtCtx);
-    virtual bool PushFrame(AVFrame* frame, bool isEnd, uint64_t pts) override;
-    virtual void AfterEncode() override;
-    virtual void Close() override;
-    static const std::vector<std::string>& GetUsableEncoders();
-
-private:
-    bool
-    _Init(const Param& encodeParam, AVFormatContext* fmtCtx);
-    bool _SetHwFrameCtx();
-    bool _Trans(AVFrame* frame);
-    AVFrame* _ToHardware();
-    static void _FindUsableEncoders();
-    bool _isHardware = false;
-    std::unique_ptr<FfmpegConverter> _converter = nullptr;
-    AVFrame* _bufferFrame = nullptr;
-    static constexpr const char* _encoderNames[4] = {
-        "h264_nvenc",
-        "h264_qsv",
-        "h264_amf",
-        "libx264",
-    };
-    static std::vector<std::string> _usableEncoders;
-    AVBufferRef* _hwDeviceCtx = nullptr;
-    AVFrame* _hwFrame = nullptr;
-    AVPixelFormat _pixFmt = AV_PIX_FMT_NV12;
-};
-
-#endif

+ 0 - 150
AvRecorder/test/muxer/av_muxer.cpp

@@ -1,150 +0,0 @@
-
-#include "av_muxer.h"
-
-bool AvMuxer::Open(std::string_view filePath, std::string_view format)
-{
-    Close();
-    _isOpenFile = false;
-    _filePath = filePath;
-    __CheckBool(avformat_alloc_output_context2(&_fmtCtx, nullptr, format.data(), _filePath.c_str()) >= 0);
-    __CheckBool(_fmtCtx);
-    return true;
-}
-
-bool AvMuxer::WriteHeader()
-{
-    av_dump_format(_fmtCtx, 0, _filePath.data(), 1);
-    // 打开输出文件
-    if (!(_fmtCtx->oformat->flags & AVFMT_NOFILE)) {
-        __CheckBool(avio_open(&_fmtCtx->pb, _filePath.c_str(), AVIO_FLAG_WRITE) >= 0);
-    }
-    // 写入文件头
-    __CheckBool(avformat_write_header(_fmtCtx, nullptr) >= 0);
-    _isOpenFile = true;
-    return true;
-}
-
-int AvMuxer::AddVideoStream(const Encoder<MediaType::VIDEO>::Param& param)
-{
-    __Check(-1, _fmtCtx->oformat->video_codec != AV_CODEC_ID_NONE);
-    Info info;
-    info.pts = 0;
-    info.fps = param.fps;
-    auto encoder = new Encoder<MediaType::VIDEO>;
-    __Check(-1, encoder->Open(param, _fmtCtx));
-    info.type = MediaType::VIDEO;
-    info.encoder = encoder;
-    __Check(-1, _AddStream(info));
-    _infos.back().stream->time_base = {1, info.fps};
-    return info.streamIndex;
-}
-
-int AvMuxer::AddAudioStream(const Encoder<MediaType::AUDIO>::Param& param)
-{
-    __Check(-1, _fmtCtx->oformat->audio_codec != AV_CODEC_ID_NONE);
-    Info info;
-    info.pts = 0;
-    info.fps = AUDIO_SAMPLE_RATE;
-    auto encoder = new Encoder<MediaType::AUDIO>;
-    info.type = MediaType::AUDIO;
-    info.encoder = encoder;
-    __Check(-1, encoder->Open(param, _fmtCtx));
-    __Check(-1, _AddStream(info));
-    _infos.back().stream->time_base = {1, AUDIO_SAMPLE_RATE};
-    return info.streamIndex;
-}
-
-bool AvMuxer::Write(AVFrame* frame, int streamIndex, bool isEnd)
-{
-    // 此函数不能被多个流同时调用
-    std::lock_guard<std::mutex> lk(_mtx);
-    __CheckBool(_infos.size() > streamIndex);
-    auto&& info = _infos[streamIndex];
-    if (info.isEnd) {
-        return true;
-    }
-    if (isEnd) {
-        info.isEnd = isEnd;
-        frame = nullptr;
-    }
-    __CheckBool(info.encoder);
-    // 检测流之间时间是不是差的太多,如果差的太多,直接弃掉数据多的流数据
-    if (!_CheckTime(double(info.pts) / info.fps)) {
-        info.isEncodeOverload = true;
-        return false;
-    }
-    info.isEncodeOverload = false;
-    __CheckBool(info.encoder->PushFrame(frame, isEnd, info.pts));
-    info.pts += info.type == MediaType::AUDIO ? info.encoder->GetCtx()->frame_size : 1; // 更新 pts
-    AVPacket* packet = nullptr;
-    while ((packet = info.encoder->Encode())) {
-        av_packet_rescale_ts(packet, info.encoder->GetCtx()->time_base, info.stream->time_base);
-        packet->stream_index = info.stream->index;
-        __CheckBool(av_interleaved_write_frame(_fmtCtx, packet) >= 0);
-    }
-    info.encoder->AfterEncode();
-    return true;
-}
-
-bool AvMuxer::_CheckTime(double time)
-{
-    auto minTime = double(_infos.front().pts) / _infos.front().fps;
-    for (int idx = 1; idx < _infos.size(); ++idx) {
-        minTime = std::min(double(_infos[idx].pts) / _infos[idx].fps, minTime);
-    }
-    if (time - minTime > 0.1) { // 说明相差的太多了,下一帧不能再送往编码器
-        return false;
-    }
-    return true;
-}
-
-void AvMuxer::Close()
-{
-    if (_fmtCtx == nullptr) {
-        return;
-    }
-    // 清空编码器缓存
-    for (int index = 0; index < _infos.size(); ++index) {
-        __DebugPrint("stream: %d, time:%f", index, double(_infos[index].pts) / _infos[index].fps);
-    }
-    if (_isOpenFile) {
-        __CheckNo(av_write_trailer(_fmtCtx) >= 0);
-        Free(_fmtCtx->pb, [this] { avio_closep(&_fmtCtx->pb); });
-    }
-    _isOpenFile = false;
-
-    for (auto&& info : _infos) {
-        info.encoder->Close();
-        Free(info.encoder, [&info] {info.encoder->Close(); delete info.encoder; });
-    }
-    _infos.clear();
-    Free(_fmtCtx, [this] { avformat_free_context(_fmtCtx); });
-}
-
-bool AvMuxer::_AddStream(Info& info)
-{
-    __CheckBool(info.stream = avformat_new_stream(_fmtCtx, nullptr));
-    info.stream->id = _fmtCtx->nb_streams - 1;
-    __CheckBool(avcodec_parameters_from_context(info.stream->codecpar, info.encoder->GetCtx()) >= 0);
-    info.streamIndex = _fmtCtx->nb_streams - 1;
-    info.pts = 0;
-    info.isEnd = false;
-    _infos.push_back(info);
-    return true;
-}
-
-AVCodecContext* AvMuxer::GetCodecCtx(int streamIndex)
-{
-    __CheckNullptr(streamIndex >= 0 && _infos.size() > streamIndex);
-    return _infos[streamIndex].encoder->GetCtx();
-}
-
-bool AvMuxer::IsEncodeOverload() const
-{
-    for (auto&& info : _infos) {
-        if (info.isEncodeOverload) {
-            return true;
-        }
-    }
-    return false;
-}

+ 0 - 43
AvRecorder/test/muxer/av_muxer.h

@@ -1,43 +0,0 @@
-#ifndef __AV_MUXER_H__
-#define __AV_MUXER_H__
-
-#include "encoder/audio_encoder.h"
-#include "encoder/video_encoder.h"
-
-class AvMuxer {
-public:
-    struct Info {
-        MediaType type;
-        AbstractEncoder* encoder = nullptr;
-        AVStream* stream = nullptr;
-        int streamIndex = -1;
-        int fps = 30;
-        uint64_t pts = 0;
-        bool isEnd = false;
-        bool isEncodeOverload = false;
-    };
-    ~AvMuxer()
-    {
-        Close();
-    }
-    bool Open(std::string_view filePath, std::string_view format = "mp4");
-    bool WriteHeader();
-    // 返回值为创建的流的索引 ,-1表示创建失败
-    int AddVideoStream(const Encoder<MediaType::VIDEO>::Param& param);
-    int AddAudioStream(const Encoder<MediaType::AUDIO>::Param& param);
-    bool Write(AVFrame* frame, int streamIndex, bool isEnd = false);
-    void Close();
-    AVCodecContext* GetCodecCtx(int streamIndex);
-    bool IsEncodeOverload() const;
-
-private:
-    std::mutex _mtx;
-    bool _isOpenFile = false;
-    bool _AddStream(Info& info);
-    bool _CheckTime(double time);
-    std::vector<Info> _infos;
-    AVFormatContext* _fmtCtx = nullptr;
-    std::string _filePath;
-};
-
-#endif

+ 0 - 105
AvRecorder/test/recorder/audio_recorder.cpp

@@ -1,105 +0,0 @@
-#include "audio_recorder.h"
-
-bool AudioRecorder::Open(const std::vector<AudioCapturer::Type>& deviceTypes,
-                         Encoder<MediaType::AUDIO>::Param& param,
-                         const uint32_t sampleRate,
-                         const uint32_t channels,
-                         const uint32_t bitsPerSample,
-                         const AVSampleFormat format)
-{
-    Close();
-    Info mixInfo;
-    mixInfo.mixer = &_mixer;
-    mixInfo.isRecord = &_isRecord;
-    mixInfo.streamIndex = &_streamIndex;
-
-    for (int index = 0; index < deviceTypes.size(); ++index) {
-        mixInfo.mixIndex = index;
-        _infos.push_back(mixInfo);
-    }
-    for (int index = 0; index < deviceTypes.size(); ++index) {
-        auto&& capturer = deviceTypes[index] == AudioCapturer::Microphone ? _micCapturer
-                                                                          : _speakerCapturer;
-        if (!capturer.Init(deviceTypes[index], _Callback, &(_infos[index]))) {
-            continue;
-        }
-        auto&& format = capturer.GetFormat();
-        __CheckBool(_mixer.AddAudioInput(index,
-                                         format.nSamplesPerSec,
-                                         format.nChannels,
-                                         format.wBitsPerSample,
-                                         _GetAVSampleFormat(format.wBitsPerSample)));
-    }
-    __CheckBool(_mixer.AddAudioOutput(sampleRate, channels, bitsPerSample, format));
-    _param = param;
-    __CheckBool(_mixer.SetOutFrameSize(1024));
-
-    for (int index = 0; index < deviceTypes.size(); ++index) {
-        if (_mixer.GetInputInfo(index) != nullptr) {
-            auto&& capturer = deviceTypes[index] == AudioCapturer::Microphone ? _micCapturer
-                                                                              : _speakerCapturer;
-            __CheckBool(capturer.Start());
-        }
-    }
-
-    return true;
-}
-
-void AudioRecorder::Close()
-{
-    StopRecord();
-    _micCapturer.Stop();
-    _speakerCapturer.Stop();
-    _mixer.Close();
-    _infos.clear();
-}
-
-void AudioRecorder::SetVolumeScale(float scale, int mixIndex)
-{
-    auto info = _mixer.GetInputInfo(mixIndex);
-    if (info != nullptr) {
-        info->scale = scale;
-    }
-}
-
-bool AudioRecorder::LoadMuxer(AvMuxer& muxer)
-{
-    for (auto&& info : _infos) {
-        info.muxer = &muxer;
-    }
-    __CheckBool((_streamIndex = muxer.AddAudioStream(_param)) != -1);
-    return true;
-}
-
-bool AudioRecorder::StartRecord()
-{
-    _isRecord = true;
-    return true;
-}
-
-void AudioRecorder::StopRecord()
-{
-    _isRecord = false;
-}
-
-void AudioRecorder::_Callback(void* data, size_t size, void* userInfo)
-{
-    auto info = (Info*) userInfo;
-    /* auto inputInfo =*/info->mixer->GetInputInfo(info->mixIndex);
-    auto frame = info->mixer->Convert(info->mixIndex, (uint8_t*) data, size);
-    if (frame == nullptr) {
-        return;
-    }
-    if (*(info->isRecord)) {
-        __CheckNo(info->streamIndex && *(info->streamIndex) != -1);
-        int frameSize = info->muxer->GetCodecCtx(*info->streamIndex)->frame_size;
-        if (info->mixer->GetOutFrameSize() != frameSize) {
-            __DebugPrint("Change frame size from %d to %d",
-                         info->mixer->GetOutFrameSize(),
-                         frameSize);
-            info->mixer->SetOutFrameSize(frameSize);
-            return;
-        }
-        __CheckNo(info->muxer->Write(frame, *(info->streamIndex)));
-    }
-}

+ 0 - 49
AvRecorder/test/recorder/audio_recorder.h

@@ -1,49 +0,0 @@
-#ifndef __AUDIO_RECORDER_H__
-#define __AUDIO_RECORDER_H__
-
-#include "capturer/audio_capturer.h"
-#include "encoder/audio_mixer.h"
-#include "muxer/av_muxer.h"
-
-class AudioRecorder {
-public:
-    struct Info {
-        AudioMixer* mixer = nullptr;
-        AvMuxer* muxer = nullptr;
-        bool* isRecord = nullptr;
-        int mixIndex;
-        int* streamIndex = nullptr;
-    };
-
-    bool Open(const std::vector<AudioCapturer::Type>& deviceTypes,
-        Encoder<MediaType::AUDIO>::Param& param,
-        const uint32_t sampleRate = AUDIO_SAMPLE_RATE,
-        const uint32_t channels = AUDIO_CHANNEL,
-        const uint32_t bitsPerSample = 32,
-        const AVSampleFormat format = AUDIO_FMT);
-    bool LoadMuxer(AvMuxer& muxer);
-    bool StartRecord();
-    void StopRecord();
-    void Close();
-    auto GetCaptureInfo(int mixIndex)
-    {
-        return _mixer.GetInputInfo(mixIndex);
-    }
-    void SetVolumeScale(float scale, int mixIndex);
-
-private:
-    AudioCapturer _micCapturer;
-    AudioCapturer _speakerCapturer;
-    AudioMixer _mixer;
-    std::vector<Info> _infos;
-    bool _isRecord = false;
-    int _streamIndex;
-    Encoder<MediaType::AUDIO>::Param _param;
-    static void _Callback(void* data, size_t size, void* userInfo);
-    AVSampleFormat _GetAVSampleFormat(int wBitsPerSample)
-    {
-        return wBitsPerSample == 16 ? AV_SAMPLE_FMT_S16 : AV_SAMPLE_FMT_S32;
-    }
-};
-
-#endif

+ 0 - 109
AvRecorder/test/recorder/video_recorder.cpp

@@ -1,109 +0,0 @@
-#include "video_recorder.h"
-#include "avrecorder/capturer/video/VideoCaptureManager.h"
-using namespace avrecorder::video;
-
-bool VideoRecorder::Open(HWND srcHwnd, Encoder<MediaType::VIDEO>::Param& param, CaptureMethod method) {
-    VideoCaptureManager capturer;
-    CaptureTarget target;
-    target.type = CaptureTargetType::Window;
-    target.hwnd = srcHwnd;
-    int width = param.width;
-    int height = param.height;
-    return capturer.open(target, method, width, height);
-}
-
-bool VideoRecorder::Open(int monitorIdx, Encoder<MediaType::VIDEO>::Param& param, CaptureMethod method) {
-    VideoCaptureManager capturer;
-    CaptureTarget target;
-    target.type = CaptureTargetType::Monitor;
-    target.monitorIdx = monitorIdx;
-    int width = param.width;
-    int height = param.height;
-    return capturer.open(target, method, width, height);
-}
-
-bool VideoRecorder::_Open(Encoder<MediaType::VIDEO>::Param& param)
-{
-    __CheckBool(_encodeFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12,
-                                                              _capturer.GetWidth(),
-                                                              _capturer.GetHeight()));
-    {
-        std::lock_guard<std::mutex> renderLk(_renderMtx);
-        __CheckBool(_renderFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12,
-                                                                  _capturer.GetWidth(),
-                                                                  _capturer.GetHeight()));
-    }
-
-    // 开始捕获画面
-    _captureTimer.Start(param.fps, [this] {
-        auto srcFrame = _capturer.GetFrame();
-        if (srcFrame != nullptr) {
-            std::lock_guard<std::mutex> muxLk(__mtx);
-            if (srcFrame->format != _encodeFrame->format) {
-                std::lock_guard<std::mutex> renderLk(_renderMtx);
-                Free(_encodeFrame, [this] { av_frame_free(&_encodeFrame); });
-                __CheckNo(
-                    _encodeFrame = Frame<MediaType::VIDEO>::Alloc(AVPixelFormat(srcFrame->format),
-                                                                  _capturer.GetWidth(),
-                                                                  _capturer.GetHeight()));
-            }
-            av_frame_copy(_encodeFrame, srcFrame);
-        }
-    });
-    param.width = _capturer.GetWidth();
-    param.height = _capturer.GetHeight();
-    _param = param;
-    return true;
-}
-
-AVFrame* VideoRecorder::GetRenderFrame()
-{
-    std::lock_guard<std::mutex> renderLk(_renderMtx);
-    if (_encodeFrame == nullptr) {
-        return nullptr;
-    }
-    if (_renderFrame->format != _encodeFrame->format) {
-        Free(_renderFrame, [this] { av_frame_free(&_renderFrame); });
-        __CheckNullptr(
-            _renderFrame = Frame<MediaType::VIDEO>::Alloc(AVPixelFormat(_encodeFrame->format),
-                                                          _capturer.GetWidth(),
-                                                          _capturer.GetHeight()));
-    }
-    av_frame_copy(_renderFrame, _encodeFrame);
-    return _renderFrame;
-}
-
-bool VideoRecorder::LoadMuxer(AvMuxer& muxer)
-{
-    _muxer = &muxer;
-    __CheckBool((_streamIndex = muxer.AddVideoStream(_param)) != -1);
-    return true;
-}
-
-bool VideoRecorder::StartRecord()
-{
-    _totalPts = 0;
-    _lossPts = 0;
-    _muxTimer.Start(_param.fps, [this] {
-        ++_totalPts;
-        if (!_muxer->Write(_encodeFrame, _streamIndex)) {
-            ++_lossPts;
-        }
-    });
-    _isRecord = true;
-    return true;
-}
-void VideoRecorder::StopRecord()
-{
-    _isRecord = false;
-    _muxTimer.Stop();
-}
-
-void VideoRecorder::Close()
-{
-    StopRecord();
-    _captureTimer.Stop();
-    _capturer.Close();
-    Free(_encodeFrame, [this] { av_frame_free(&_encodeFrame); });
-    Free(_renderFrame, [this] { av_frame_free(&_renderFrame); });
-}

+ 0 - 44
AvRecorder/test/recorder/video_recorder.h

@@ -1,44 +0,0 @@
-#ifndef __VIDEO_RECORDER_H__
-#define __VIDEO_RECORDER_H__
-
-#include "basic/timer.h"
-#include "avrecorder/capturer/video/VideoCaptureManager.h"
-using namespace avrecorder::video;
-#include "muxer/av_muxer.h"
-// #include <condition_variable>
-// #include <queue>
-
-class VideoRecorder {
-public:
-    bool Open(HWND srcHwnd, Encoder<MediaType::VIDEO>::Param& param, CaptureMethod method);
-    bool Open(int monitorIdx, Encoder<MediaType::VIDEO>::Param& param, CaptureMethod method);
-    bool LoadMuxer(AvMuxer& muxer);
-    bool StartRecord();
-    void StopRecord();
-    auto GetCapturerType() { return _capturer.GetMethod(); }
-    AVFrame* GetRenderFrame();
-    // 停止录制
-    void Close();
-    void SetIsDrawCursor(bool isDraw)
-    {
-        _capturer.SetDrawCursor(isDraw);
-    }
-    bool IsCaptureOverload() const { return _captureTimer.IsOverload(); }
-    double GetLossRate() { return _lossPts == 0 ? 0 : (double)_lossPts / _totalPts; }
-
-private:
-    bool _Open(Encoder<MediaType::VIDEO>::Param& param);
-    VideoCapturer _capturer;
-    AvMuxer* _muxer = nullptr;
-    bool _isRecord = false;
-    int _streamIndex = -1;
-    AVFrame* _encodeFrame = nullptr;
-    AVFrame* _renderFrame = nullptr;
-    Encoder<MediaType::VIDEO>::Param _param;
-    Timer _captureTimer;
-    Timer _muxTimer;
-    std::mutex _renderMtx;
-    uint64_t _totalPts = 0;
-    uint64_t _lossPts = 0;
-};
-#endif

+ 0 - 37
AvRecorder/test/ui/audio_render.cpp

@@ -1,37 +0,0 @@
-#include "audio_render.h"
-
-#include <QPainter>
-
-AudioRender::AudioRender(QLabel* parent)
-    : QLabel(parent)
-{
-}
-void AudioRender::ShowVolume(float volume)
-{
-    float val = 0;
-    if (volume < 0) {
-        volume = -volume;
-    }
-
-    if (volume > 0.001) {
-        val = (20.0f * log10(volume) + 60.0f) / 60.0f;
-    }
-
-    float diff = val - _lastShowVal;
-    if (diff < -0.015f) {
-        diff = -0.015f;
-    }
-    _lastShowVal += diff;
-}
-
-void AudioRender::paintEvent(QPaintEvent* event)
-{
-    int val = _lastShowVal * width();
-    QPainter painter(this);
-    QPen pen(Qt::green, height());
-    painter.setPen(pen);
-    painter.drawLine(0, 0, val, 0);
-    pen.setColor(Qt::gray);
-    painter.setPen(pen);
-    painter.drawLine(val, 0, width(), 0);
-}

+ 0 - 17
AvRecorder/test/ui/audio_render.h

@@ -1,17 +0,0 @@
-#ifndef __AUDIO_RENDER_H__
-#define __AUDIO_RENDER_H__
-
-// 这里直接使用 Qt 中的 QLabel 进行音量的渲染
-
-#include <QLabel>
-
-class AudioRender : public QLabel {
-public:
-    AudioRender(QLabel* parent = nullptr);
-    void ShowVolume(float volume);
-
-protected:
-    virtual void paintEvent(QPaintEvent* event) override;
-    float _lastShowVal = 0;
-};
-#endif

+ 0 - 49
AvRecorder/test/ui/audio_widget.cpp

@@ -1,49 +0,0 @@
-#include "audio_widget.h"
-#include <QLayout>
-#include <qglobal.h>
-
-AudioWidget::AudioWidget(QWidget* parent)
-    : QWidget(parent)
-{
-    _CreateUi();
-    _CreateConnect();
-    _mutebox->setChecked(true);
-}
-
-void AudioWidget::_CreateUi()
-{
-    auto hLayout = new QHBoxLayout;
-    _nameLabel = new QLabel;
-    _mutebox = new QCheckBox("静音");
-    _render = new AudioRender;
-    _volumeBox = new QDoubleSpinBox;
-    _volumeBox->setMinimum(0);
-    _volumeBox->setValue(1);
-    hLayout->addWidget(_nameLabel);
-    hLayout->addWidget(_mutebox);
-    auto scaleLayout = new QHBoxLayout;
-    scaleLayout->addWidget(new QLabel("调幅:"));
-    scaleLayout->addWidget(_volumeBox);
-    hLayout->addLayout(scaleLayout);
-    auto vLayout = new QVBoxLayout;
-    vLayout->addLayout(hLayout);
-    vLayout->addWidget(_render);
-    setLayout(vLayout);
-}
-
-void AudioWidget::_CreateConnect()
-{
-    connect(_mutebox, &QCheckBox::stateChanged, [this](int) {
-        if (_mutebox->isChecked()) {
-            emit SetVolumeScale(0);
-            _volumeBox->setEnabled(false);
-        } else {
-            _volumeBox->setEnabled(true);
-            emit SetVolumeScale(_volumeBox->value());
-        }
-    });
-
-    connect(_volumeBox, QOverload<double>::of(&QDoubleSpinBox::valueChanged), [this] {
-        emit SetVolumeScale(_volumeBox->value());
-    });
-}

+ 0 - 37
AvRecorder/test/ui/audio_widget.h

@@ -1,37 +0,0 @@
-#ifndef __AUDIO_WIDGET_H__
-#define __AUDIO_WIDGET_H__
-
-#include <QCheckBox>
-#include <QLabel>
-#include <QPushButton>
-#include <QSlider>
-#include <QSpinBox>
-#include "audio_render.h"
-
-class AudioWidget : public QWidget
-{
-    Q_OBJECT
-public:
-    AudioWidget(QWidget* parent = nullptr);
-    void ShowVolume(float volume)
-    {
-        _render->ShowVolume(volume);
-        _render->update();
-    }
-    void SetName(const std::string& name) { _nameLabel->setText(name.c_str()); }
-    double GetVolume() { return _mutebox->isChecked() ? 0 : _volumeBox->value(); }
-
-private:
-    void _CreateUi();
-    void _CreateConnect();
-    QLabel* _nameLabel = nullptr;
-    AudioRender* _render = nullptr;
-    QCheckBox* _mutebox = nullptr;
-    QDoubleSpinBox* _volumeBox = nullptr;
-    float _lastShowVal = 0;
-
-signals:
-    void SetVolumeScale(float scale);
-};
-
-#endif

+ 0 - 386
AvRecorder/test/ui/av_recorder.cpp

@@ -1,386 +0,0 @@
-#include "av_recorder.h"
-
-#include <QDateTime>
-#include <QStatusBar>
-#include <capturer/finder.h>
-#include "avrecorder/capturer/video/VideoCaptureManager.h"
-using namespace avrecorder::video;
-
-AvRecorder::AvRecorder(QWidget* parent)
-    : QWidget(parent)
-{
-    setWindowTitle("Recorder");
-    _settingsParam.audioParam.bitRate = 160'000;
-    _settingsParam.videoParam.bitRate = 8'000'000;
-    _settingsParam.videoParam.fps = 30;
-    _settingsParam.videoParam.name = Encoder<MediaType::VIDEO>::GetUsableEncoders().front();
-    _settingsParam.outputDir = ".";
-    _settingsParam.liveUrl = "rtmp://127.0.0.1:1935";
-    _settingsParam.liveName = "stream";
-
-    glWidget = new OpenGLVideoWidget(this);
-
-    auto layout = new QVBoxLayout;
-    auto hLayout = new QHBoxLayout;
-
-    hLayout->addLayout(_InitAudioUi(), 2);
-    hLayout->addLayout(_InitListUi(), 2);
-    hLayout->addLayout(_InitOtherUi(), 1);
-    _InitStatusBarUi();
-
-    layout->addWidget(glWidget, 4);
-    layout->addLayout(hLayout, 1);
-    setLayout(layout);
-    _UpdateCaptureList();
-
-    _InitConnect();
-}
-
-void AvRecorder::_InitConnect()
-{
-    // 启动
-    auto timer = new QTimer(this);
-    connect(timer, &QTimer::timeout, [this, timer] {
-        _isLocked = true;
-        _StopPreview();
-        _StopCapture();
-        _StartCapture(CaptureMethod::WGC);
-        _StartPreview();
-        _isLocked = false;
-        timer->stop();
-    });
-    timer->start(100);
-
-    connect(_recordBtn, &QPushButton::released, [this] {
-        if (!_isRecord) {
-            auto fileName = _settingsParam.outputDir;
-            if (fileName.back() != '\\') {
-                fileName.push_back('\\');
-            }
-            auto format = "mp4";
-            fileName += QDateTime::currentDateTime().toString("yyyy-MM-dd-hh-mm-ss").toStdString()
-                        + "." + format;
-            // fileName += std::string("test.") + format;
-            __CheckNo(_StartStream(fileName, format));
-            _liveBtn->setEnabled(false);
-            _recordBtn->setText("停止录制");
-        } else {
-            _StopStream();
-            _liveBtn->setEnabled(true);
-            _recordBtn->setText("开始录制");
-        }
-        _isRecord = !_isRecord;
-    });
-    connect(_liveBtn, &QPushButton::released, [this] {
-        if (!_isLive) {
-            auto fileName = _settingsParam.liveUrl + "/" + _settingsParam.liveName;
-            bool isRtsp = _settingsParam.liveUrl.find("rtsp") != std::string::npos;
-            __CheckNo(_StartStream(fileName, isRtsp ? "rtsp" : "flv"));
-            _recordBtn->setEnabled(false);
-            _liveBtn->setText("停止直播");
-        } else {
-            _StopStream();
-            _recordBtn->setEnabled(true);
-            _liveBtn->setText("开始直播");
-        }
-        _isLive = !_isLive;
-    });
-    connect(_microphoneWidget, &AudioWidget::SetVolumeScale, [this](float scale) {
-        m_audioRecorder.SetVolumeScale(scale, MICROPHONE_INDEX);
-    });
-    connect(_speakerWidget, &AudioWidget::SetVolumeScale, [this](float scale) {
-        m_audioRecorder.SetVolumeScale(scale, SPEAKER_INDEX);
-    });
-    connect(_updateListBtn, &QPushButton::released, [this] { _UpdateCaptureList(); });
-    connect(_captureListWidget, &QListWidget::currentTextChanged, [this](const QString& text) {
-        if (text.isEmpty() || _isLocked) {
-            return;
-        }
-        _isLocked = true;
-        _StopPreview();
-        _StopCapture();
-        _StartCapture(CaptureMethod::WGC);
-        _StartPreview();
-        _isLocked = false;
-    });
-    connect(_isDrawCursorBox, &QCheckBox::stateChanged, [this] {
-        m_videoRecorder.SetIsDrawCursor(_isDrawCursorBox->isChecked());
-    });
-    connect(_captureMethodBox, &QComboBox::currentTextChanged, [this](const QString& text) {
-        if (_isLocked || text.isEmpty()) {
-            return;
-        }
-        _StopPreview();
-        _StopCapture();
-        if (text == "WGC") {
-            _StartCapture(CaptureMethod::WGC);
-        } else if (text == "DXGI") {
-            _StartCapture(CaptureMethod::DXGI);
-        } else {
-            _StartCapture(CaptureMethod::GDI);
-        }
-        _StartPreview();
-    });
-    connect(_settingsBtn, &QPushButton::released, [this] {
-        auto settingsPage = std::make_unique<SettingsPage>(&_settingsParam, this);
-        settingsPage->exec();
-        _isLocked = true;
-        _StopPreview();
-        _StopCapture();
-        _StartCapture(CaptureMethod::WGC);
-        _StartPreview();
-        _isLocked = false;
-    });
-
-    _otherTimer.callOnTimeout([this] {
-        if (windowState() == Qt::WindowMinimized) {
-            return;
-        }
-        // 音频
-        auto info = m_audioRecorder.GetCaptureInfo(MICROPHONE_INDEX);
-        _microphoneWidget->ShowVolume(info == nullptr ? 0 : info->volume);
-        info = m_audioRecorder.GetCaptureInfo(SPEAKER_INDEX);
-        _speakerWidget->ShowVolume(info == nullptr ? 0 : info->volume);
-        // 状态栏
-        if (_isRecord || _isLive) {
-            int interval = _recordTime.secsTo(QTime::currentTime());
-            int sec = interval % 60;
-            interval /= 60;
-            int minute = interval % 60;
-            int hour = interval / 60;
-            _captureTimeLabel->setText(QString("%1:%2:%3")
-                                           .arg(hour, 2, 10, QChar('0'))
-                                           .arg(minute, 2, 10, QChar('0'))
-                                           .arg(sec, 2, 10, QChar('0')));
-            auto lossRate = m_videoRecorder.GetLossRate();
-            int num = lossRate * 10000;
-            _videolossRate->setText(QString("丢帧率: %1.%2%")
-                                        .arg(num / 100, 2, 10, QChar('0'))
-                                        .arg(num % 100, 2, 10, QChar('0')));
-        } else if (_captureTimeLabel->text() != "00:00:00") {
-            _captureTimeLabel->setText("00:00:00");
-        }
-    });
-}
-
-AvRecorder::~AvRecorder()
-{
-    _StopStream();
-    _StopPreview();
-    _StopCapture();
-}
-
-void AvRecorder::_StartCapture(CaptureMethod method)
-{
-    if (_isLocked) {
-        _captureMethodBox->clear();
-        _captureMethodBox->addItem("WGC");
-    }
-
-    // 判断是要捕获屏幕还是窗口
-    int idx = _captureListWidget->currentRow();
-    if (idx < 0) {
-        idx = 0;
-        _captureListWidget->setCurrentRow(idx);
-    }
-
-    int monitorCnt = (int) MonitorFinder::GetList().size();
-    if (idx < monitorCnt) { // 捕获屏幕
-        if (_captureMethodBox->count() < 2) {
-            _captureMethodBox->addItem("DXGI");
-        }
-
-        VideoCaptureManager capturer;
-        CaptureTarget target;
-        target.type = CaptureTargetType::Monitor;
-        target.monitorIdx = idx;
-        int width = 1920; // 设置宽度
-        int height = 1080; // 设置高度
-        if (capturer.open(target, method, width, height)) {
-            AVFrame* frame = capturer.getFrame();
-            // ...处理 frame ...
-            capturer.close();
-        }
-
-    } else {
-        if (_captureMethodBox->count() < 2) {
-            _captureMethodBox->addItem("GDI");
-        }
-        auto hwnd = WindowFinder::GetList()[idx - monitorCnt].hwnd;
-
-        VideoCaptureManager capturer;
-        CaptureTarget target;
-        target.type = CaptureTargetType::Window;
-        target.hwnd = hwnd;
-        int width = 1920; // 设置宽度
-        int height = 1080; // 设置高度
-        if (capturer.open(target, method, width, height)) {
-            AVFrame* frame = capturer.getFrame();
-            // ...处理 frame ...
-            capturer.close();
-        }
-    }
-    _DealCapture();
-    _isDrawCursorBox->setEnabled(true);
-    _recordBtn->setEnabled(true);
-    _liveBtn->setEnabled(true);
-    m_videoRecorder.SetIsDrawCursor(_isDrawCursorBox->isChecked());
-    m_audioRecorder.SetVolumeScale(_microphoneWidget->GetVolume(), MICROPHONE_INDEX);
-    m_audioRecorder.SetVolumeScale(_speakerWidget->GetVolume(), SPEAKER_INDEX);
-}
-
-void AvRecorder::_DealCapture()
-{
-    __CheckNo(m_audioRecorder.Open({AudioCapturer::Microphone, AudioCapturer::Speaker},
-                                   _settingsParam.audioParam));
-    _microphoneWidget->setEnabled(m_audioRecorder.GetCaptureInfo(MICROPHONE_INDEX) != nullptr);
-    _speakerWidget->setEnabled(m_audioRecorder.GetCaptureInfo(SPEAKER_INDEX) != nullptr);
-    _fpsLabel->setText(QString("FPS: %1").arg(_settingsParam.videoParam.fps));
-    _videoEncodeLabel->setText(("编码器: " + _settingsParam.videoParam.name).c_str());
-}
-
-void AvRecorder::_StopCapture()
-{
-    m_videoRecorder.Close();
-    m_audioRecorder.Close();
-}
-
-void AvRecorder::_StartPreview()
-{
-    glWidget->Open(_settingsParam.videoParam.width, _settingsParam.videoParam.height);
-
-    // __CheckNo(_videoRender.Open(_videoWidget->GetHwnd(),
-    //                             _settingsParam.videoParam.width,
-    //                             _settingsParam.videoParam.height));
-    // _videoWidget->SetScaleFixSize(_settingsParam.videoParam.width, _settingsParam.videoParam.height);
-    // _videoWidget->setAttribute(Qt::WA_TransparentForMouseEvents, true);
-    // 视频需要做到和帧率一样的渲染速度,QTimer 达不到要求
-    // 需要自己封装一个计时器
-    _videoRenderTimer.Start(_settingsParam.videoParam.fps, [this] {
-        if (windowState() == Qt::WindowMinimized) {
-            return;
-        }
-        // 视频
-        auto frame = m_videoRecorder.GetRenderFrame();
-        // __CheckNo(_videoRender.Render(frame));
-        glWidget->Render(frame);
-    });
-
-    // 刷新率设置为 25
-    _otherTimer.start(40);
-}
-
-void AvRecorder::_StopPreview()
-{
-    _videoRenderTimer.Stop();
-    // _videoRender.Close();
-    _otherTimer.stop();
-}
-
-bool AvRecorder::_StartStream(std::string_view path, std::string_view format)
-{
-    __CheckBool(_avMuxer.Open(path, format));
-    __CheckBool(m_audioRecorder.LoadMuxer(_avMuxer));
-    __CheckBool(m_videoRecorder.LoadMuxer(_avMuxer));
-    __CheckBool(_avMuxer.WriteHeader());
-    __CheckBool(m_audioRecorder.StartRecord());
-    __CheckBool(m_videoRecorder.StartRecord());
-    _recordTime = QTime::currentTime();
-    _captureStatusLabel->setText("状态: 正在工作");
-    _settingsBtn->setEnabled(false);
-    _captureListWidget->setEnabled(false);
-    _updateListBtn->setEnabled(false);
-    _captureMethodBox->setEnabled(false);
-    return true;
-}
-
-void AvRecorder::_StopStream()
-{
-    m_audioRecorder.StopRecord();
-    m_videoRecorder.StopRecord();
-    _avMuxer.Close();
-    _captureStatusLabel->setText("状态: 正常");
-    _settingsBtn->setEnabled(true);
-    _captureListWidget->setEnabled(true);
-    _updateListBtn->setEnabled(true);
-    _captureMethodBox->setEnabled(true);
-}
-
-void AvRecorder::_UpdateCaptureList()
-{
-    _captureListWidget->clear();
-    auto&& monitorList = MonitorFinder::GetList(true);
-    for (auto&& monitor : monitorList) {
-        _captureListWidget->addItem("屏幕: " + QString::fromStdWString(monitor.title));
-    }
-    auto&& windowList = WindowFinder::GetList(true);
-    for (auto&& window : windowList) {
-        _captureListWidget->addItem("窗口: " + QString::fromStdWString(window.title));
-    }
-    // _captureListWidget->hide();
-    // _updateListBtn->hide();
-}
-
-
-QVBoxLayout* AvRecorder::_InitListUi()
-{
-    auto layout = new QVBoxLayout;
-    _captureListWidget = new QListWidget;
-    layout->addWidget(_captureListWidget);
-    return layout;
-}
-
-QVBoxLayout* AvRecorder::_InitAudioUi()
-{
-    _microphoneWidget = new AudioWidget;
-    _speakerWidget = new AudioWidget;
-    _microphoneWidget->SetName("麦克风");
-    _speakerWidget->SetName("扬声器");
-    auto layout = new QVBoxLayout;
-    layout->addWidget(_microphoneWidget);
-    layout->addWidget(_speakerWidget);
-    return layout;
-}
-
-QVBoxLayout* AvRecorder::_InitOtherUi()
-{
-    _isDrawCursorBox = new QCheckBox("绘制鼠标指针");
-    _isDrawCursorBox->setChecked(true);
-    _isDrawCursorBox->setEnabled(false);
-    _updateListBtn = new QPushButton("刷新窗口列表");
-    _recordBtn = new QPushButton("开始录制");
-    _recordBtn->setEnabled(false);
-    _liveBtn = new QPushButton("开始直播");
-    _liveBtn->setEnabled(false);
-    _settingsBtn = new QPushButton("设置");
-    auto layout = new QVBoxLayout;
-    layout->addWidget(_isDrawCursorBox);
-    layout->addWidget(_updateListBtn);
-    layout->addWidget(_recordBtn);
-    layout->addWidget(_liveBtn);
-    layout->addWidget(_settingsBtn);
-    return layout;
-}
-
-void AvRecorder::_InitStatusBarUi()
-{
-    _videoEncodeLabel = new QLabel;
-    auto hLayout = new QHBoxLayout;
-    hLayout->addWidget(new QLabel("捕获方式:"));
-    _captureMethodBox = new QComboBox;
-    hLayout->addWidget(_captureMethodBox);
-    _captureStatusLabel = new QLabel("状态: 正常");
-    _captureTimeLabel = new QLabel("00:00:00");
-    _videolossRate = new QLabel("丢帧率: 00.00%");
-    _fpsLabel = new QLabel("FPS: 30");
-    // auto statusBar = this->statusBar();
-    // statusBar->layout()->setSpacing(20);
-    // statusBar->layout()->addWidget(_videoEncodeLabel);
-    auto widget = new QWidget;
-    widget->setLayout(hLayout);
-    // statusBar->layout()->addWidget(widget);
-    // statusBar->layout()->addWidget(_videolossRate);
-    // statusBar->layout()->addWidget(_captureStatusLabel);
-    // statusBar->layout()->addWidget(_captureTimeLabel);
-    // statusBar->layout()->addWidget(_fpsLabel);
-}

+ 0 - 67
AvRecorder/test/ui/av_recorder.h

@@ -1,67 +0,0 @@
-#pragma once
-
-#include <QCheckBox>
-#include <QLayout>
-#include <QListWidget>
-#include <QPushButton>
-#include <QTime>
-#include <QTimer>
-#include <QWidget>
-
-#include "audio_widget.h"
-#include "recorder/audio_recorder.h"
-#include "recorder/video_recorder.h"
-#include "ui/opengl_video_widget.h"
-#include "ui/settings_page.h"
-#include "avrecorder/capturer/video/VideoCaptureManager.h"
-using namespace avrecorder::video;
-
-class AvRecorder : public QWidget
-{
-    Q_OBJECT
-public:
-    AvRecorder(QWidget* parent = nullptr);
-    ~AvRecorder();
-
-private:
-    AudioRecorder m_audioRecorder;
-    VideoRecorder m_videoRecorder;
-    AvMuxer _avMuxer;
-    // VideoRender _videoRender;
-    OpenGLVideoWidget* glWidget;
-    AudioWidget* _microphoneWidget = nullptr;
-    AudioWidget* _speakerWidget = nullptr;
-    QPushButton* _recordBtn = nullptr;
-    QPushButton* _liveBtn = nullptr;
-    QPushButton* _settingsBtn = nullptr;
-    QCheckBox* _isDrawCursorBox = nullptr;
-    Timer _videoRenderTimer;
-    QTimer _otherTimer;
-    QListWidget* _captureListWidget = nullptr;
-    QPushButton* _updateListBtn = nullptr;
-    bool _isRecord = false;
-    bool _isLive = false;
-    void _InitUi();
-    QComboBox* _captureMethodBox = nullptr;
-    QLabel* _captureStatusLabel = nullptr;
-    QLabel* _captureTimeLabel = nullptr;
-    QLabel* _fpsLabel = nullptr;
-    QLabel* _videoEncodeLabel = nullptr;
-    QLabel* _videolossRate = nullptr;
-    SettingsPage::Param _settingsParam;
-    QVBoxLayout* _InitListUi();
-    QVBoxLayout* _InitAudioUi();
-    QVBoxLayout* _InitOtherUi();
-    QTime _recordTime;
-    bool _isLocked = false;
-    void _InitStatusBarUi();
-    void _UpdateCaptureList();
-    void _StartCapture(CaptureMethod method);
-    void _StopCapture();
-    void _StartPreview();
-    void _DealCapture();
-    void _StopPreview();
-    bool _StartStream(std::string_view path, std::string_view format);
-    void _StopStream();
-    void _InitConnect();
-};

+ 0 - 341
AvRecorder/test/ui/opengl_video_widget.cpp

@@ -1,341 +0,0 @@
-#include "opengl_video_widget.h"
-#include <QDebug>
-
-OpenGLVideoWidget::OpenGLVideoWidget(QWidget* parent)
-    : QOpenGLWidget(parent)
-    , m_program(nullptr)
-    , m_textureId(0)
-    , m_frameData(nullptr)
-    , m_frameWidth(0)
-    , m_frameHeight(0)
-    , m_frameFormat(0)
-    , m_frameUpdated(false)
-    , m_initialized(false)
-{
-    // 设置顶点坐标
-    m_vertices[0] = -1.0f; m_vertices[1] = -1.0f;
-    m_vertices[2] = 1.0f;  m_vertices[3] = -1.0f;
-    m_vertices[4] = -1.0f; m_vertices[5] = 1.0f;
-    m_vertices[6] = 1.0f;  m_vertices[7] = 1.0f;
-
-    // 设置纹理坐标
-    m_texCoords[0] = 0.0f; m_texCoords[1] = 1.0f;
-    m_texCoords[2] = 1.0f; m_texCoords[3] = 1.0f;
-    m_texCoords[4] = 0.0f; m_texCoords[5] = 0.0f;
-    m_texCoords[6] = 1.0f; m_texCoords[7] = 0.0f;
-}
-
-OpenGLVideoWidget::~OpenGLVideoWidget()
-{
-    Close();
-}
-
-bool OpenGLVideoWidget::Open(unsigned int width, unsigned int height)
-{
-    QMutexLocker locker(&m_mutex);
-    
-    m_frameWidth = width;
-    m_frameHeight = height;
-    
-    // 如果已经有数据,释放它
-    if (m_frameData) {
-        delete[] m_frameData;
-    }
-    
-    // 分配新的内存
-    m_frameData = new unsigned char[width * height * 4]; // RGBA格式
-    memset(m_frameData, 0, width * height * 4);
-    
-    return true;
-}
-
-void OpenGLVideoWidget::Close()
-{
-    makeCurrent();
-    if (m_textureId) {
-        glDeleteTextures(1, &m_textureId);
-        m_textureId = 0;
-    }
-    if (m_program) {
-        delete m_program;
-        m_program = nullptr;
-    }
-    doneCurrent();
-    
-    // 释放帧数据
-    QMutexLocker locker(&m_mutex);
-    if (m_frameData) {
-        delete[] m_frameData;
-        m_frameData = nullptr;
-    }
-    
-    m_frameWidth = 0;
-    m_frameHeight = 0;
-    m_frameUpdated = false;
-    m_initialized = false;
-}
-
-void OpenGLVideoWidget::initializeGL()
-{
-    initializeOpenGLFunctions();
-    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
-
-    // 创建着色器程序
-    m_program = new QOpenGLShaderProgram();
-    m_program->addShaderFromSourceCode(QOpenGLShader::Vertex,
-        "attribute vec2 vertexIn;\n"
-        "attribute vec2 textureIn;\n"
-        "varying vec2 textureOut;\n"
-        "void main(void)\n"
-        "{\n"
-        "    gl_Position = vec4(vertexIn, 0.0, 1.0);\n"
-        "    textureOut = textureIn;\n"
-        "}\n");
-    m_program->addShaderFromSourceCode(QOpenGLShader::Fragment,
-        "varying vec2 textureOut;\n"
-        "uniform sampler2D texture;\n"
-        "void main(void)\n"
-        "{\n"
-        "    gl_FragColor = texture2D(texture, textureOut);\n"
-        "}\n");
-    m_program->bindAttributeLocation("vertexIn", 0);
-    m_program->bindAttributeLocation("textureIn", 1);
-    m_program->link();
-    
-    // 创建纹理
-    glGenTextures(1, &m_textureId);
-    glBindTexture(GL_TEXTURE_2D, m_textureId);
-    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
-    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
-    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
-    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
-    glBindTexture(GL_TEXTURE_2D, 0);
-    
-    m_initialized = true;
-}
-
-void OpenGLVideoWidget::resizeGL(int width, int height)
-{
-    glViewport(0, 0, width, height);
-}
-
-void OpenGLVideoWidget::paintGL()
-{
-    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
-    
-    QMutexLocker locker(&m_mutex);
-    
-    if (!m_frameData || m_frameWidth <= 0 || m_frameHeight <= 0 || !m_frameUpdated)
-        return;
-    
-    // 绑定纹理并更新数据
-    glBindTexture(GL_TEXTURE_2D, m_textureId);
-    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_frameWidth, m_frameHeight, 
-                0, GL_RGBA, GL_UNSIGNED_BYTE, m_frameData);
-    
-    // 使用着色器程序
-    m_program->bind();
-    
-    // 设置纹理单元
-    m_program->setUniformValue("texture", 0);
-    
-    // 设置顶点和纹理坐标
-    m_program->enableAttributeArray(0);
-    m_program->enableAttributeArray(1);
-    m_program->setAttributeArray(0, m_vertices, 2);
-    m_program->setAttributeArray(1, m_texCoords, 2);
-    
-    // 绘制
-    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
-    
-    // 清理
-    m_program->disableAttributeArray(0);
-    m_program->disableAttributeArray(1);
-    m_program->release();
-    glBindTexture(GL_TEXTURE_2D, 0);
-}
-
-void OpenGLVideoWidget::updateFrame(const VideoFrame& frame)
-{
-    if (!frame.data || frame.width <= 0 || frame.height <= 0)
-        return;
-    
-    QMutexLocker locker(&m_mutex);
-    
-    // 如果尺寸变化,重新分配内存
-    if (m_frameWidth != frame.width || m_frameHeight != frame.height) {
-        if (m_frameData) {
-            delete[] m_frameData;
-        }
-        
-        m_frameWidth = frame.width;
-        m_frameHeight = frame.height;
-        m_frameData = new unsigned char[m_frameWidth * m_frameHeight * 4]; // RGBA格式
-    }
-    
-    // 复制帧数据
-    memcpy(m_frameData, frame.data, m_frameWidth * m_frameHeight * 4);
-    m_frameUpdated = true;
-    
-    // 请求重绘
-    update();
-}
-
-bool OpenGLVideoWidget::convertFromAVFrame(AVFrame* frame)
-{
-    if (!frame || frame->width <= 0 || frame->height <= 0)
-        return false;
-
-    QMutexLocker locker(&m_mutex);
-
-    // 如果尺寸变化,重新分配内存
-    if (m_frameWidth != frame->width || m_frameHeight != frame->height) {
-        if (m_frameData) {
-            delete[] m_frameData;
-        }
-
-        m_frameWidth = frame->width;
-        m_frameHeight = frame->height;
-        m_frameData = new unsigned char[m_frameWidth * m_frameHeight * 4]; // RGBA格式
-    }
-
-    // 根据不同的像素格式进行转换
-    switch (frame->format) {
-    case AV_PIX_FMT_RGBA: {
-        // 直接复制RGBA数据
-        for (int y = 0; y < frame->height; y++) {
-            memcpy(m_frameData + y * m_frameWidth * 4,
-                   frame->data[0] + y * frame->linesize[0],
-                   frame->width * 4);
-        }
-    } break;
-    case AV_PIX_FMT_RGB24: {
-        // RGB24转RGBA
-        for (int y = 0; y < frame->height; y++) {
-            uint8_t* src = frame->data[0] + y * frame->linesize[0];
-            uint8_t* dst = m_frameData + y * m_frameWidth * 4;
-
-            for (int x = 0; x < frame->width; x++) {
-                *dst++ = *src++; // R
-                *dst++ = *src++; // G
-                *dst++ = *src++; // B
-                *dst++ = 255;    // A
-            }
-        }
-    } break;
-    case AV_PIX_FMT_BGR0:
-    case AV_PIX_FMT_BGRA: {
-        // BGRA转RGBA
-        for (int y = 0; y < frame->height; y++) {
-            uint8_t* src = frame->data[0] + y * frame->linesize[0];
-            uint8_t* dst = m_frameData + y * m_frameWidth * 4;
-
-            for (int x = 0; x < frame->width; x++) {
-                uint8_t b = *src++;
-                uint8_t g = *src++;
-                uint8_t r = *src++;
-                uint8_t a = *src++;
-
-                *dst++ = r;
-                *dst++ = g;
-                *dst++ = b;
-                *dst++ = a;
-            }
-        }
-    } break;
-    case AV_PIX_FMT_YUV420P: // 添加对YUV420P格式的支持
-    {
-        // YUV420P转RGBA
-        for (int y = 0; y < frame->height; y++) {
-            uint8_t* dst = m_frameData + y * m_frameWidth * 4;
-
-            for (int x = 0; x < frame->width; x++) {
-                int Y = frame->data[0][y * frame->linesize[0] + x];
-                int U = frame->data[1][(y / 2) * frame->linesize[1] + (x / 2)];
-                int V = frame->data[2][(y / 2) * frame->linesize[2] + (x / 2)];
-
-                // YUV转RGB公式
-                int C = Y - 16;
-                int D = U - 128;
-                int E = V - 128;
-
-                int R = (298 * C + 409 * E + 128) >> 8;
-                int G = (298 * C - 100 * D - 208 * E + 128) >> 8;
-                int B = (298 * C + 516 * D + 128) >> 8;
-
-                // 限制RGB值在0-255范围内
-                R = R < 0 ? 0 : (R > 255 ? 255 : R);
-                G = G < 0 ? 0 : (G > 255 ? 255 : G);
-                B = B < 0 ? 0 : (B > 255 ? 255 : B);
-
-                *dst++ = R;   // R
-                *dst++ = G;   // G
-                *dst++ = B;   // B
-                *dst++ = 255; // A
-            }
-        }
-    } break;
-    case AV_PIX_FMT_NV12: {
-        // NV12转RGBA
-        for (int y = 0; y < frame->height; y++) {
-            uint8_t* dst = m_frameData + y * m_frameWidth * 4;
-
-            for (int x = 0; x < frame->width; x++) {
-                int Y = frame->data[0][y * frame->linesize[0] + x];
-                int U = frame->data[1][(y / 2) * frame->linesize[1] + (x / 2) * 2];
-                int V = frame->data[1][(y / 2) * frame->linesize[1] + (x / 2) * 2 + 1];
-
-                // YUV转RGB公式
-                int C = Y - 16;
-                int D = U - 128;
-                int E = V - 128;
-
-                int R = (298 * C + 409 * E + 128) >> 8;
-                int G = (298 * C - 100 * D - 208 * E + 128) >> 8;
-                int B = (298 * C + 516 * D + 128) >> 8;
-
-                // 限制RGB值在0-255范围内
-                R = R < 0 ? 0 : (R > 255 ? 255 : R);
-                G = G < 0 ? 0 : (G > 255 ? 255 : G);
-                B = B < 0 ? 0 : (B > 255 ? 255 : B);
-
-                *dst++ = R;   // R
-                *dst++ = G;   // G
-                *dst++ = B;   // B
-                *dst++ = 255; // A
-            }
-        }
-    } break;
-    default:
-        // 对于其他格式,可以考虑使用FFmpeg的sws_scale函数
-        qDebug() << "Unsupported pixel format:" << frame->format;
-        return false;
-    }
-
-    m_frameUpdated = true;
-    update();
-    return true;
-}
-
-bool OpenGLVideoWidget::Render(AVFrame* frame)
-{
-    if (!m_initialized && isValid()) {
-        makeCurrent();
-        initializeGL();
-        doneCurrent();
-    }
-    
-    if (!frame) {
-        update(); // 仅刷新显示
-        return true;
-    }
-    
-    return convertFromAVFrame(frame);
-}
-
-void OpenGLVideoWidget::clearFrame()
-{
-    QMutexLocker locker(&m_mutex);
-    m_frameUpdated = false;
-    update();
-}

+ 0 - 59
AvRecorder/test/ui/opengl_video_widget.h

@@ -1,59 +0,0 @@
-#pragma once
-
-#include <QMutex>
-#include <QOpenGLFunctions>
-#include <QOpenGLShaderProgram>
-#include <QOpenGLTexture>
-#include <QOpenGLWidget>
-#include "basic/frame.h" // 添加对 AVFrame 的支持
-
-// 定义 VideoFrame 结构体
-struct VideoFrame {
-    unsigned char* data;
-    int width;
-    int height;
-    int format;
-};
-
-class OpenGLVideoWidget : public QOpenGLWidget, protected QOpenGLFunctions
-{
-    Q_OBJECT
-public:
-    explicit OpenGLVideoWidget(QWidget* parent = nullptr);
-    ~OpenGLVideoWidget();
-
-    // 添加与 VideoRender 类似的接口
-    bool Open(unsigned int width, unsigned int height);
-    void Close();
-    bool Render(AVFrame* frame);
-    
-    // 原有接口
-    void updateFrame(const VideoFrame& frame);
-    void clearFrame();
-    
-    // 添加从 AVFrame 转换的方法
-    bool convertFromAVFrame(AVFrame* frame);
-
-protected:
-    void initializeGL() override;
-    void paintGL() override;
-    void resizeGL(int width, int height) override;
-
-private:
-    QOpenGLShaderProgram* m_program;
-    GLuint m_textureId;  // 使用原生OpenGL纹理ID
-    
-    // 存储原始帧数据
-    unsigned char* m_frameData;
-    int m_frameWidth;
-    int m_frameHeight;
-    int m_frameFormat; // 可以用来表示像素格式
-    
-    QMutex m_mutex;
-    bool m_frameUpdated;
-    bool m_initialized;
-    
-    // 顶点和纹理坐标
-    GLfloat m_vertices[8];
-    GLfloat m_texCoords[8];
-};

+ 0 - 146
AvRecorder/test/ui/settings_page.cpp

@@ -1,146 +0,0 @@
-
-
-#include "settings_page.h"
-#include "encoder/video_encoder.h"
-#include <QFileDialog>
-
-SettingsPage::SettingsPage(Param* param, QWidget* parent)
-    : QDialog(parent)
-    , _param(param)
-{
-    setFont(QFont("Microsoft Yahei"));
-    _InitUi();
-    _InitConnect();
-}
-
-void SettingsPage::_InitConnect()
-{
-    connect(_applyBtn, &QPushButton::released, [this] {
-        _WriteSettings();
-    });
-
-    connect(_cancelBtn, &QPushButton::released, [this] {
-        this->close();
-    });
-
-    connect(_yesBtn, &QPushButton::released, [this] {
-        _WriteSettings();
-        this->close();
-    });
-
-    connect(_selDirBtn, &QPushButton::released, [this] {
-        QString selectedDir = QFileDialog::getExistingDirectory(this, "选择输出目录", "./", QFileDialog::ShowDirsOnly);
-        // 若目录路径不为空
-        if (!selectedDir.isEmpty()) {
-            // 显示选择的目录路径
-            _fileDirEdit->setText(selectedDir);
-        }
-    });
-}
-
-void SettingsPage::_WriteSettings()
-{
-    _param->videoParam.bitRate = _videoBitRateBox->value() * 1000;
-    _param->videoParam.fps = _videoFpsBox->value();
-    _param->videoParam.name = _videoEncoderBox->currentText().toStdString();
-    _param->audioParam.bitRate = _audioBitRateBox->value() * 1000;
-    _param->outputDir = _fileDirEdit->text().toStdString();
-    _param->liveUrl = _liveUrlEdit->text().toStdString();
-    _param->liveName = _liveNameEdit->text().toStdString();
-}
-
-void SettingsPage::_InitUi()
-{
-    setWindowTitle("Settings");
-    auto layout = new QVBoxLayout;
-    layout->addWidget(_InitVideoUi());
-    layout->addWidget(_InitAudioUi());
-    layout->addWidget(_InitOutputUi());
-    layout->addWidget(_InitLiveUi());
-    auto hLayout = new QHBoxLayout;
-    _applyBtn = new QPushButton("应用");
-    _cancelBtn = new QPushButton("取消");
-    _yesBtn = new QPushButton("确定");
-    hLayout->setAlignment(Qt::AlignRight);
-    hLayout->addWidget(_applyBtn);
-    hLayout->addWidget(_cancelBtn);
-    hLayout->addWidget(_yesBtn);
-    layout->addLayout(hLayout);
-    setLayout(layout);
-}
-
-QGroupBox* SettingsPage::_InitVideoUi()
-{
-    auto groupBox = new QGroupBox("视频");
-    auto layout = new QVBoxLayout;
-    _videoBitRateBox = new QSpinBox;
-    _videoBitRateBox->setMinimum(0);
-    _videoBitRateBox->setMaximum(INT_MAX);
-    _videoBitRateBox->setValue(_param->videoParam.bitRate / 1000);
-    _videoFpsBox = new QSpinBox;
-    _videoFpsBox->setMinimum(0);
-    _videoFpsBox->setMaximum(60);
-    _videoFpsBox->setValue(_param->videoParam.fps);
-    _videoEncoderBox = new QComboBox;
-    auto&& encoders = Encoder<MediaType::VIDEO>::GetUsableEncoders();
-    for (auto&& encoder : encoders) {
-        _videoEncoderBox->addItem(encoder.c_str());
-    }
-    _videoEncoderBox->setCurrentText(_param->videoParam.name.c_str());
-    layout->addLayout(_CreateDescription("比特率(kB):", "越高的比特率越清晰, 但越占用硬件资源", _videoBitRateBox));
-    layout->addLayout(_CreateDescription("帧率:", "越高的帧率越流畅, 但越占用硬件资源", _videoFpsBox));
-    layout->addLayout(_CreateDescription("编码器:", "libx264 为软件编码, CPU占用高但兼容性强, 其他为硬件编码, 效果与软件编码相反", _videoEncoderBox));
-    groupBox->setLayout(layout);
-    return groupBox;
-}
-QGroupBox* SettingsPage::_InitAudioUi()
-{
-    auto groupBox = new QGroupBox("音频");
-    auto layout = new QVBoxLayout;
-    _audioBitRateBox = new QSpinBox;
-    _audioBitRateBox->setMinimum(0);
-    _audioBitRateBox->setMaximum(INT_MAX);
-    _audioBitRateBox->setValue(_param->audioParam.bitRate / 1000);
-    layout->addLayout(_CreateDescription("比特率(kB):", "越高的比特率越清晰, 但越占用硬件资源", _audioBitRateBox));
-    groupBox->setLayout(layout);
-    return groupBox;
-}
-
-QGroupBox* SettingsPage::_InitOutputUi()
-{
-    auto groupBox = new QGroupBox("输出");
-    auto layout = new QHBoxLayout;
-    _fileDirEdit = new QLineEdit(_param->outputDir.c_str());
-    _selDirBtn = new QPushButton("选择");
-    layout->addWidget(_fileDirEdit);
-    layout->addWidget(_selDirBtn);
-    groupBox->setLayout(layout);
-    return groupBox;
-}
-
-QGroupBox* SettingsPage::_InitLiveUi()
-{
-    auto groupBox = new QGroupBox("直播");
-    auto layout = new QVBoxLayout;
-    _liveUrlEdit = new QLineEdit(_param->liveUrl.c_str());
-    _liveNameEdit = new QLineEdit(_param->liveName.c_str());
-    auto liveUrlLayout = new QHBoxLayout();
-    liveUrlLayout->addWidget(new QLabel("地址:"));
-    liveUrlLayout->addWidget(_liveUrlEdit);
-    auto liveNameLayout = new QHBoxLayout();
-    liveNameLayout->addWidget(new QLabel("名称(密钥):"));
-    liveNameLayout->addWidget(_liveNameEdit);
-    layout->addLayout(liveUrlLayout);
-    layout->addLayout(liveNameLayout);
-    groupBox->setLayout(layout);
-    return groupBox;
-}
-QHBoxLayout* SettingsPage::_CreateDescription(std::string_view text, std::string_view textEx, QWidget* widget)
-{
-    auto layout = new QHBoxLayout;
-    auto label = new QLabel(text.data());
-    label->setToolTip(textEx.data());
-    layout->addWidget(label);
-    layout->addWidget(widget);
-    return layout;
-}

+ 0 - 50
AvRecorder/test/ui/settings_page.h

@@ -1,50 +0,0 @@
-#ifndef __SETTINGS_H__
-#define __SETTINGS_H__
-
-#include "encoder/audio_encoder.h"
-#include "encoder/video_encoder.h"
-#include <QComboBox>
-#include <QDialog>
-#include <QGroupBox>
-#include <QLabel>
-#include <QLayout>
-#include <QLineEdit>
-#include <QPushButton>
-#include <QSpinBox>
-
-class SettingsPage : public QDialog {
-public:
-    struct Param {
-        Encoder<MediaType::AUDIO>::Param audioParam;
-        Encoder<MediaType::VIDEO>::Param videoParam;
-        std::string outputDir;
-        std::string liveUrl;
-        std::string liveName;
-    };
-    SettingsPage(Param* param, QWidget* parent = nullptr);
-
-private:
-    void _InitUi();
-    void _InitConnect();
-    void _WriteSettings();
-    QGroupBox* _InitVideoUi();
-    QGroupBox* _InitAudioUi();
-    QGroupBox* _InitOutputUi();
-    QGroupBox* _InitLiveUi();
-    Param* _param = nullptr;
-    QSpinBox* _videoBitRateBox = nullptr;
-    QSpinBox* _videoFpsBox = nullptr;
-    QComboBox* _videoEncoderBox = nullptr;
-    QSpinBox* _audioBitRateBox = nullptr;
-    QLineEdit* _fileDirEdit = nullptr;
-    QLineEdit* _liveUrlEdit = nullptr;
-    QLineEdit* _liveNameEdit = nullptr;
-    QPushButton* _selDirBtn = nullptr;
-    QPushButton* _applyBtn = nullptr;
-    QPushButton* _cancelBtn = nullptr;
-    QPushButton* _yesBtn = nullptr;
-
-    QHBoxLayout* _CreateDescription(std::string_view text, std::string_view textEx, QWidget* widget);
-};
-
-#endif

BIN
bin/2025-07-12-20-11-39.mp4


BIN
bin/2025-07-12-20-11-58.mp4