Kaynağa Gözat

扬声器录制

zhuizhu 9 ay önce
ebeveyn
işleme
5802b9b75f

+ 4 - 2
AvRecorder/capturer/audio/audio.pri

@@ -1,9 +1,11 @@
 HEADERS += \
     $$PWD/audio_capturer.h \
     $$PWD/audio_qt_capturer.h \
-    $$PWD/iaudiocapturer.h
+    $$PWD/iaudiocapturer.h \
+    $$PWD/wasapi_loopback_capturer.h
 
 SOURCES += \
     $$PWD/audio_capturer.cpp \
     $$PWD/audio_qt_capturer.cpp \
-    $$PWD/iaudiocapturer.cpp
+    $$PWD/iaudiocapturer.cpp \
+    $$PWD/wasapi_loopback_capturer.cpp

+ 340 - 0
AvRecorder/capturer/audio/wasapi_loopback_capturer.cpp

@@ -0,0 +1,340 @@
+#include "wasapi_loopback_capturer.h"
+#include <audioclient.h>
+#include <comdef.h>
+#include <mmdeviceapi.h>
+#include <windows.h>
+#include <QDebug>
+
+#define DEFAULT_SAMPLE_RATE 48000  // 默认采样率:48kHz
+#define DEFAULT_BITS_PER_SAMPLE 16 // 默认位深:16bit
+#define DEFAULT_CHANNELS 1         // 默认音频通道数:1
+
+#undef min
+
+class WASAPILoopbackCapturerPrivate
+{
+public:
+    WASAPILoopbackCapturerPrivate() { CoInitialize(nullptr); }
+    ~WASAPILoopbackCapturerPrivate()
+    {
+        if (pwfx)
+            CoTaskMemFree(pwfx);
+        if (pCaptureClient)
+            pCaptureClient->Release();
+        if (pAudioClient)
+            pAudioClient->Release();
+        if (pDevice)
+            pDevice->Release();
+        if (pEnumerator)
+            pEnumerator->Release();
+        CoUninitialize();
+    }
+
+    bool init()
+    {
+        HRESULT hr;
+        hr = CoCreateInstance(__uuidof(MMDeviceEnumerator),
+                              nullptr,
+                              CLSCTX_ALL,
+                              __uuidof(IMMDeviceEnumerator),
+                              (void**) &pEnumerator);
+        if (FAILED(hr)) {
+            qDebug() << "Failed to create MMDeviceEnumerator";
+            return false;
+        }
+
+        hr = pEnumerator->GetDefaultAudioEndpoint(eRender, eConsole, &pDevice);
+        if (FAILED(hr)) {
+            qDebug() << "Failed to get default audio endpoint";
+            return false;
+        }
+
+        hr = pDevice->Activate(__uuidof(IAudioClient), CLSCTX_ALL, nullptr, (void**) &pAudioClient);
+        if (FAILED(hr)) {
+            qDebug() << "Failed to activate audio client";
+            return false;
+        }
+        return true;
+    }
+    // 检查并获取首选格式
+    bool getPreferredFormat() {
+        HRESULT hr = pAudioClient->GetMixFormat(&pwfx);
+        if (FAILED(hr)) {
+            qDebug() << "Failed to GetMixFormat, HRESULT:" << QString::number(hr, 16);
+            return false;
+        }
+        qDebug() << "Audio Format Info:";
+        qDebug() << "  Sample Rate:" << pwfx->nSamplesPerSec;
+        qDebug() << "  Channels:" << pwfx->nChannels;
+        qDebug() << "  Bits Per Sample:" << pwfx->wBitsPerSample;
+        qDebug() << "  Format Tag:" << pwfx->wFormatTag;
+        qDebug() << "  Block Align:" << pwfx->nBlockAlign;
+        qDebug() << "  Avg Bytes Per Sec:" << pwfx->nAvgBytesPerSec;
+        return true;
+    }
+    // 初始化音频客户端
+    bool initializeAudioClient()
+    {
+        AUDCLNT_SHAREMODE shareMode = AUDCLNT_SHAREMODE_SHARED;
+        DWORD streamFlags = AUDCLNT_STREAMFLAGS_LOOPBACK | AUDCLNT_STREAMFLAGS_AUTOCONVERTPCM
+                            | AUDCLNT_STREAMFLAGS_SRC_DEFAULT_QUALITY;
+        REFERENCE_TIME hnsBufferDuration = 0;
+
+        HRESULT hr
+            = pAudioClient->Initialize(shareMode, streamFlags, hnsBufferDuration, 0, pwfx, nullptr);
+        if (FAILED(hr)) {
+            qDebug() << "Failed to initialize audio client, HRESULT:" << QString::number(hr, 16);
+            return false;
+        }
+        qDebug() << "Audio client initialized successfully";
+        return true;
+    }
+
+    bool setupCaptureClient()
+    {
+        if (!pAudioClient) {
+            qDebug() << "Audio client is null, cannot get capture client";
+            return false;
+        }
+        
+        HRESULT hr = pAudioClient->GetService(__uuidof(IAudioCaptureClient), (void**) &pCaptureClient);
+        if (FAILED(hr)) {
+            qDebug() << "Failed to get capture client, HRESULT:" << QString::number(hr, 16) 
+                     << "AudioClient:" << pAudioClient << "CaptureClient:" << pCaptureClient;
+            return false;
+        }
+        
+        qDebug() << "Capture client obtained successfully:" << pCaptureClient;
+        return true;
+    }
+
+    bool startCapture()
+    {
+        if (!pAudioClient) {
+            qDebug() << "Audio client is null, cannot start capture";
+            return false;
+        }
+        
+        HRESULT hr = pAudioClient->Start();
+        if (FAILED(hr)) {
+            qDebug() << "Failed to start audio client, HRESULT:" << QString::number(hr, 16);
+            return false;
+        }
+        
+        qDebug() << "Audio client started successfully";
+        return true;
+    }
+
+    void stopCapture()
+    {
+        if (pAudioClient) {
+            pAudioClient->Stop();
+        }
+    }
+
+    // 获取音频格式并设置到AudioFormat结构中
+    bool setupAudioFormat(AudioFormat& audioFormat)
+    {
+        if (!pwfx) {
+            return false;
+        }
+
+        // 设置音频格式
+        audioFormat.sampleRate = pwfx->nSamplesPerSec;
+        audioFormat.channels = pwfx->nChannels;
+        audioFormat.bitsPerSample = pwfx->wBitsPerSample;
+        audioFormat.blockAlign = pwfx->nBlockAlign;
+        audioFormat.avgBytesPerSec = pwfx->nAvgBytesPerSec;
+
+        // 如果是浮点格式,保持32bit,让混音器处理格式转换
+        if (pwfx->wFormatTag == WAVE_FORMAT_IEEE_FLOAT) {
+            qDebug() << "Keeping 32t float format for mixer processing";
+        }
+
+        return true;
+    }
+
+    // 处理音频数据
+    int processAudioData(std::vector<char>& buffer, std::mutex& mutex)
+    {
+        UINT32 packetLength = 0;
+        HRESULT hr = pCaptureClient->GetNextPacketSize(&packetLength);
+        if (FAILED(hr)) {
+            qDebug() << "Failed to get next packet size";
+            return -1;
+        }
+        
+        if (packetLength == 0) {
+            return 0;
+        }
+        
+        BYTE* pData = nullptr;
+        UINT32 numFrames = 0;
+        DWORD flags = 0;
+        hr = pCaptureClient->GetBuffer(&pData, &numFrames, &flags, nullptr, nullptr);
+        if (FAILED(hr)) {
+            qDebug() << "Failed to get buffer";
+            return -1;
+        }
+        
+        int bytes = numFrames * pwfx->nBlockAlign;
+
+        if (pwfx->wFormatTag == WAVE_FORMAT_IEEE_FLOAT) {
+            // 32bit浮点格式,直接传递原始数据,让混音器处理格式转换
+            std::lock_guard<std::mutex> lock(mutex);
+            // 限制缓冲区大小,避免内存无限增长
+            const size_t maxBufferSize = 1024 * 1024; // 1MB
+            int bytesToAdd = numFrames * pwfx->nBlockAlign;
+            if (buffer.size() + bytesToAdd > maxBufferSize) {
+                buffer.erase(buffer.begin(), buffer.begin() + (buffer.size() - bytesToAdd));
+            }
+            buffer.insert(buffer.end(),
+                          (char*) pData,
+                          (char*) pData + bytesToAdd);
+        } else {
+            // PCM 直接拷贝
+            std::lock_guard<std::mutex> lock(mutex);
+            // 限制缓冲区大小,避免内存无限增长
+            const size_t maxBufferSize = 1024 * 1024; // 1MB
+            int bytesToAdd = numFrames * pwfx->nBlockAlign;
+            if (buffer.size() + bytesToAdd > maxBufferSize) {
+                buffer.erase(buffer.begin(), buffer.begin() + (buffer.size() - bytesToAdd));
+            }
+            buffer.insert(buffer.end(),
+                          (char*) pData,
+                          (char*) pData + bytesToAdd);
+        }
+
+        pCaptureClient->ReleaseBuffer(numFrames);
+        return bytes;
+    }
+
+    IMMDeviceEnumerator* pEnumerator = nullptr;
+    IMMDevice* pDevice = nullptr;
+    IAudioClient* pAudioClient = nullptr;
+    IAudioCaptureClient* pCaptureClient = nullptr;
+    WAVEFORMATEX* pwfx = nullptr;
+
+    WAVEFORMATEXTENSIBLE _formatex;
+};
+
+WASAPILoopbackCapturer::WASAPILoopbackCapturer(QObject* parent)
+    : QObject(parent)
+    , d(new WASAPILoopbackCapturerPrivate)
+{
+
+}
+
+WASAPILoopbackCapturer::~WASAPILoopbackCapturer()
+{
+    Stop();
+    delete d;
+}
+
+bool WASAPILoopbackCapturer::Init(Type deviceType)
+{
+    // 只支持扬声器
+    if (deviceType != Type::Speaker)
+        return false;
+
+    if (!d->init()) {
+        qDebug() << "Failed to initialize WASAPI components";
+        return false;
+    }
+    
+    // 总是获取首选格式,因为我们需要 pwfx 来初始化音频客户端
+    if (!d->getPreferredFormat()) {
+        qDebug() << "Failed to get preferred format";
+        return false;
+    }
+    
+    if (!d->initializeAudioClient()) {
+        qDebug() << "Failed to initialize audio client";
+        return false;
+    }
+
+    // 设置音频格式
+    if (!d->setupAudioFormat(m_audioFormat)) {
+        qDebug() << "Failed to setup audio format";
+        return false;
+    }
+
+    qDebug() << "WASAPI Loopback Capturer initialized successfully";
+    return true;
+}
+
+bool WASAPILoopbackCapturer::Start()
+{
+    if (m_running)
+        return false;
+        
+    if (!d->setupCaptureClient()) {
+        return false;
+    }
+    
+    if (!d->startCapture()) {
+        return false;
+    }
+    
+    m_running = true;
+    m_captureThread = std::thread(&WASAPILoopbackCapturer::captureThreadFunc, this);
+    return true;
+}
+
+void WASAPILoopbackCapturer::Stop()
+{
+    if (!m_running)
+        return;
+    m_running = false;
+    if (m_captureThread.joinable())
+        m_captureThread.join();
+    d->stopCapture();
+}
+
+const AudioFormat& WASAPILoopbackCapturer::GetFormat() const
+{
+    return m_audioFormat;
+}
+
+int WASAPILoopbackCapturer::readAudioData(char* buf, int maxLen)
+{
+    std::unique_lock<std::mutex> lock(m_mutex);
+    
+    // 按帧对齐读取,确保不会破坏音频帧
+    int blockAlign = m_audioFormat.blockAlign;
+    if (blockAlign <= 0) {
+        return 0;
+    }
+    
+    // 计算可以读取的完整帧数
+    int availableFrames = m_buffer.size() / blockAlign;
+    int requestedFrames = maxLen / blockAlign;
+    int framesToRead = std::min(availableFrames, requestedFrames);
+    
+    if (framesToRead > 0) {
+        int bytesToRead = framesToRead * blockAlign;
+        memcpy(buf, m_buffer.data(), bytesToRead);
+        m_buffer.erase(m_buffer.begin(), m_buffer.begin() + bytesToRead);
+        return bytesToRead;
+    }
+    
+    return 0;
+}
+
+void WASAPILoopbackCapturer::captureThreadFunc()
+{
+    qDebug() << "WASAPI Loopback capture started successfully";
+
+    while (m_running) {
+        int result = d->processAudioData(m_buffer, m_mutex);
+        if (result < 0) {
+            qDebug() << "Error processing audio data";
+            break;
+        }
+        if (result == 0) {
+            Sleep(10);
+        }
+    }
+
+    qDebug() << "WASAPI Loopback capture stopped";
+}

+ 39 - 0
AvRecorder/capturer/audio/wasapi_loopback_capturer.h

@@ -0,0 +1,39 @@
+#ifndef WASAPI_LOOPBACK_CAPTURER_H
+#define WASAPI_LOOPBACK_CAPTURER_H
+
+#include <QObject>
+#include <atomic>
+#include <condition_variable>
+#include <mutex>
+#include <thread>
+#include <vector>
+
+#include "iaudiocapturer.h"
+
+class WASAPILoopbackCapturer : public QObject, public IAudioCapturer
+{
+    Q_OBJECT
+public:
+    explicit WASAPILoopbackCapturer(QObject* parent = nullptr);
+    ~WASAPILoopbackCapturer() override;
+
+    bool Init(Type deviceType) override;
+    bool Start() override;
+    void Stop() override;
+    const AudioFormat& GetFormat() const override;
+    int readAudioData(char* buf, int maxLen) override;
+
+private:
+    void captureThreadFunc();
+
+    std::atomic<bool> m_running{false};
+    std::thread m_captureThread;
+    std::vector<char> m_buffer;
+    std::mutex m_mutex;
+    std::condition_variable m_cv;
+    AudioFormat m_audioFormat;
+
+    class WASAPILoopbackCapturerPrivate* d;
+};
+
+#endif // WASAPI_LOOPBACK_CAPTURER_H

+ 18 - 4
AvRecorder/encoder/audio_mixer.cpp

@@ -3,6 +3,8 @@
 #include "basic/basic.h"
 #include "basic/frame.h"
 
+#include <QDebug>
+
 #include <windows.h>
 
 AVSampleFormat BitsToFmt(int bits)
@@ -164,6 +166,14 @@ AVFrame* AudioMixer::Convert(uint32_t index, uint8_t* inBuf, uint32_t size)
     std::lock_guard<std::mutex> locker(_mutex);
     auto iter = _audioInputInfos.find(index);
     __CheckNullptr(iter != _audioInputInfos.end());
+    
+    // 添加调试信息
+    static int debugCounter = 0;
+    if (++debugCounter % 100 == 0) {
+        qDebug() << "AudioMixer::Convert - Input size:" << size 
+                 << "bytes, Input index: " << index;
+    }
+    
     __CheckNullptr(iter->second.resampler->Convert(inBuf, size));
     return _AdjustVolume() ? _outputFrame : nullptr;
 }
@@ -222,12 +232,16 @@ bool AudioMixer::_AdjustVolume()
             // 所以这里要清零
             memset(writeStream, 0, _outputFrame->linesize[0]);
         }
-        // 逐个计算赋值
+        // 逐个计算赋值,优化音量处理
         for (int idx = 0; idx < _outputFrame->nb_samples; ++idx) {
-            writeStream[idx] += readStream[idx] * scale;
-            if (writeStream[idx] > 0.99) {
-                writeStream[idx] = 0.99f;
+            float sample = writeStream[idx] + readStream[idx] * scale; // accumulate into the mix — plain '=' would make each input overwrite the previous ones
+            // 使用软限制而不是硬限制,减少失真
+            if (sample > 0.8f) {
+                sample = 0.8f + (sample - 0.8f) * 0.3f; // 软限制
+            } else if (sample < -0.8f) {
+                sample = -0.8f + (sample + 0.8f) * 0.3f; // 软限制
             }
+            writeStream[idx] = sample;
         }
     }
     return true;

+ 38 - 9
AvRecorder/recorder/audio_recorder.cpp

@@ -1,6 +1,7 @@
 #include "audio_recorder.h"
 
 #include "capturer/audio/audio_qt_capturer.h"
+#include "capturer/audio/wasapi_loopback_capturer.h"
 #include "qdebug.h"
 
 AudioRecorder::AudioRecorder() {}
@@ -29,21 +30,42 @@ bool AudioRecorder::Open(const std::vector<AudioCapturer::Type>& deviceTypes,
     for (int index = 0; index < deviceTypes.size(); ++index) {
         mixInfo.mixIndex = index;
         _infos.push_back(mixInfo);
-        m_audioCapturers.push_back(new QtAudioCapturer());
+        if (deviceTypes[index] == AudioCapturer::Type::Speaker) {
+            m_audioCapturers.push_back(new WASAPILoopbackCapturer());
+        } else {
+            m_audioCapturers.push_back(new QtAudioCapturer());
+        }
     }
 
     // 初始化每个音频捕获器
     for (int index = 0; index < deviceTypes.size(); ++index) {
-        auto capturer = m_audioCapturers[index];
-        if (!capturer->Init(deviceTypes[index])) {
-            continue;
+        if (!m_audioCapturers[index]->Init(deviceTypes[index])) {
+            qDebug() << "Failed to initialize audio capturer" << index;
+            return false;
+        }
+        
+        // 添加音频格式验证
+        const AudioFormat& format = m_audioCapturers[index]->GetFormat();
+        qDebug() << "Audio Capturer" << index << "Format:" << "SampleRate:" << format.sampleRate
+                 << "Channels:" << format.channels << "BitsPerSample:" << format.bitsPerSample
+                 << "BlockAlign:" << format.blockAlign
+                 << "AvgBytesPerSec:" << format.avgBytesPerSec;
+
+        // 验证格式是否与预期一致
+        if (format.sampleRate != sampleRate) {
+            qDebug() << "Warning: Sample rate mismatch. Expected:" << sampleRate
+                     << "Got:" << format.sampleRate;
         }
-        auto&& format = capturer->GetFormat();
+        if (format.channels != channels) {
+            qDebug() << "Warning: Channel count mismatch. Expected:" << channels
+                     << "Got:" << format.channels;
+        }
+        
         __CheckBool(_mixer.AddAudioInput(index,
-                                         format.sampleRate,
-                                         format.channels,
-                                         format.bitsPerSample,
-                                         _GetAVSampleFormat(format.bitsPerSample)));
+                                          format.sampleRate,
+                                          format.channels,
+                                          format.bitsPerSample,
+                                          _GetAVSampleFormat(format.bitsPerSample)));
     }
 
     __CheckBool(_mixer.AddAudioOutput(sampleRate, channels, bitsPerSample, format));
@@ -115,6 +137,13 @@ void AudioRecorder::PullAndProcessAudio()
             char buf[1024];
             int bytes = m_audioCapturers[index]->readAudioData(buf, sizeof(buf));
             if (bytes <= 0) break;
+            
+            // 添加调试信息,显示数据大小
+            // static int debugCounter = 0;
+            // if (++debugCounter % 100 == 0) { // 每100次打印一次,避免日志过多
+            //     qDebug() << "Capturer" << index << "read" << bytes << "bytes";
+            // }
+
             auto frame = _mixer.Convert(index, (uint8_t*)buf, bytes);
             if (frame && _isRecord && _streamIndex != -1) {
                 int frameSize = _mixer.GetOutFrameSize();

+ 18 - 2
AvRecorder/recorder/audio_recorder.h

@@ -45,9 +45,25 @@ private:
     Timer m_audioTimer; // 新增高精度定时器
     static constexpr int AUDIO_PULL_INTERVAL_MS = 10;
     static void _Callback(void* data, size_t size, void* userInfo);
-    AVSampleFormat _GetAVSampleFormat(int wBitsPerSample)
+    AVSampleFormat _GetAVSampleFormat(int wBitsPerSample, bool isFloat = true)
     {
-        return wBitsPerSample == 16 ? AV_SAMPLE_FMT_S16 : AV_SAMPLE_FMT_S32;
+        // isFloat=true 表示32/64位时优先返回浮点格式,否则返回整型
+        switch (wBitsPerSample) {
+            case 8:
+                return AV_SAMPLE_FMT_U8;
+            case 16:
+                return AV_SAMPLE_FMT_S16;
+            case 24:
+                // FFmpeg没有24bit整型,通常用32bit整型或float
+                return isFloat ? AV_SAMPLE_FMT_FLT : AV_SAMPLE_FMT_S32;
+            case 32:
+                return isFloat ? AV_SAMPLE_FMT_FLT : AV_SAMPLE_FMT_S32;
+            case 64:
+                return isFloat ? AV_SAMPLE_FMT_DBL : AV_SAMPLE_FMT_S64;
+            default:
+                // 默认返回float
+                return AV_SAMPLE_FMT_FLT;
+        }
     }
 };
 

+ 56 - 11
AvRecorder/recorder/video_recorder.cpp

@@ -4,19 +4,24 @@
 #include <capturer/finder.h>
 using namespace avrecorder::video;
 
-bool VideoRecorder::Open(HWND srcHwnd, Encoder<MediaType::VIDEO>::Param& param, CaptureMethod method) {
+bool VideoRecorder::Open(HWND srcHwnd, Encoder<MediaType::VIDEO>::Param& param, CaptureMethod method)
+{
     CaptureTarget target;
     target.type = CaptureTargetType::Window;
     target.hwnd = srcHwnd;
     RECT clientRect;
-    if (!GetClientRect(srcHwnd, &clientRect)) return false;
+    if (!GetClientRect(srcHwnd, &clientRect))
+        return false;
     int width = clientRect.right - clientRect.left;
     int height = clientRect.bottom - clientRect.top;
     _capturer.open(target, method, width, height);
     return _Open(param);
 }
 
-bool VideoRecorder::Open(int monitorIdx, Encoder<MediaType::VIDEO>::Param& param, CaptureMethod method) {
+bool VideoRecorder::Open(int monitorIdx,
+                         Encoder<MediaType::VIDEO>::Param& param,
+                         CaptureMethod method)
+{
     CaptureTarget target;
     target.type = CaptureTargetType::Monitor;
     target.monitorIdx = monitorIdx;
@@ -31,13 +36,21 @@ bool VideoRecorder::Open(int monitorIdx, Encoder<MediaType::VIDEO>::Param& param
 bool VideoRecorder::_Open(Encoder<MediaType::VIDEO>::Param& param)
 {
     __CheckBool(_encodeFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12,
-                                                              _capturer.getFrame() ? _capturer.getFrame()->width : param.width,
-                                                              _capturer.getFrame() ? _capturer.getFrame()->height : param.height));
+                                                              _capturer.getFrame()
+                                                                  ? _capturer.getFrame()->width
+                                                                  : param.width,
+                                                              _capturer.getFrame()
+                                                                  ? _capturer.getFrame()->height
+                                                                  : param.height));
     {
         std::lock_guard<std::mutex> renderLk(_renderMtx);
         __CheckBool(_renderFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12,
-                                                                  _capturer.getFrame() ? _capturer.getFrame()->width : param.width,
-                                                                  _capturer.getFrame() ? _capturer.getFrame()->height : param.height));
+                                                                  _capturer.getFrame()
+                                                                      ? _capturer.getFrame()->width
+                                                                      : param.width,
+                                                                  _capturer.getFrame()
+                                                                      ? _capturer.getFrame()->height
+                                                                      : param.height));
     }
     // 捕获定时器和帧获取逻辑
     _captureTimer.Start(param.fps, [this] {
@@ -69,7 +82,9 @@ AVFrame* VideoRecorder::GetRenderFrame()
     }
     if (_renderFrame->format != _encodeFrame->format) {
         Free(_renderFrame, [this] { av_frame_free(&_renderFrame); });
-        _renderFrame = Frame<MediaType::VIDEO>::Alloc(AVPixelFormat(_encodeFrame->format), _encodeFrame->width, _encodeFrame->height);
+        _renderFrame = Frame<MediaType::VIDEO>::Alloc(AVPixelFormat(_encodeFrame->format),
+                                                      _encodeFrame->width,
+                                                      _encodeFrame->height);
     }
     av_frame_copy(_renderFrame, _encodeFrame);
     return _renderFrame;
@@ -90,9 +105,11 @@ bool VideoRecorder::StartRecord()
     _muxTimer.Start(_param.fps, [this] {
         ++_totalPts;
         bool loss = !_muxer->Write(_encodeFrame, _streamIndex);
-        if (loss) ++_lossPts;
+        if (loss)
+            ++_lossPts;
         _lossHistory.push_back(loss);
-        if (_lossHistory.size() > LOSS_WINDOW) _lossHistory.pop_front();
+        if (_lossHistory.size() > LOSS_WINDOW)
+            _lossHistory.pop_front();
     });
     _isRecord = true;
     return true;
@@ -105,7 +122,8 @@ void VideoRecorder::StopRecord()
 
 double VideoRecorder::GetLossRate()
 {
-    if (_lossHistory.size() < LOSS_WINDOW) return -1.0; // 统计中
+    if (_lossHistory.size() < LOSS_WINDOW)
+        return -1.0; // 统计中
     int lossCount = std::count(_lossHistory.begin(), _lossHistory.end(), true);
     return double(lossCount) / _lossHistory.size();
 }
@@ -118,3 +136,30 @@ void VideoRecorder::Close()
     Free(_encodeFrame, [this] { av_frame_free(&_encodeFrame); });
     Free(_renderFrame, [this] { av_frame_free(&_renderFrame); });
 }
+
+void VideoRecorder::SetCaptureSource(HWND srcHwnd, CaptureMethod method)
+{
+    // 只切换采集源,不重启编码器和推流
+    CaptureTarget target;
+    target.type = CaptureTargetType::Window;
+    target.hwnd = srcHwnd;
+    RECT clientRect;
+    if (!GetClientRect(srcHwnd, &clientRect))
+        return;
+    int width = clientRect.right - clientRect.left;
+    int height = clientRect.bottom - clientRect.top;
+    _capturer.open(target, method, width, height);
+}
+
+void VideoRecorder::SetCaptureSource(int monitorIdx, CaptureMethod method)
+{
+    // 只切换采集源,不重启编码器和推流
+    CaptureTarget target;
+    target.type = CaptureTargetType::Monitor;
+    target.monitorIdx = monitorIdx;
+    auto monitorInfo = MonitorFinder::GetList()[monitorIdx];
+    RECT rect = monitorInfo.rect;
+    int width = rect.right - rect.left;
+    int height = rect.bottom - rect.top;
+    _capturer.open(target, method, width, height);
+}

+ 4 - 0
AvRecorder/recorder/video_recorder.h

@@ -22,8 +22,12 @@ public:
     void SetIsDrawCursor(bool isDraw) { _capturer.setDrawCursor(isDraw); }
     bool IsCaptureOverload() const { return _captureTimer.IsOverload(); }
     double GetLossRate();
+    void SetCaptureSource(HWND srcHwnd, CaptureMethod method);
+    void SetCaptureSource(int monitorIdx, CaptureMethod method);
 
 private:
+    static constexpr int kCanvasWidth = 1920;
+    static constexpr int kCanvasHeight = 1080;
     bool _Open(Encoder<MediaType::VIDEO>::Param& param);
     VideoCaptureManager _capturer;
     AvMuxer* _muxer = nullptr;

+ 39 - 12
AvRecorder/ui/av_recorder.cpp

@@ -178,10 +178,12 @@ void AvRecorder::initConnect()
             return;
         }
         m_isLocked = true;
-        stopPreview();
-        stopCapture();
-        startCapture(CaptureMethod::WGC);
-        startPreview();
+        if (!(m_isRecord || m_isLive)) {
+            stopPreview();
+            stopCapture();
+            startCapture(CaptureMethod::WGC);
+            startPreview();
+        }
         m_isLocked = false;
     });
     connect(m_captureComboBox, QOverload<int>::of(&QComboBox::currentIndexChanged), this, &AvRecorder::onCaptureSourceChanged);
@@ -282,10 +284,22 @@ void AvRecorder::startCapture(CaptureMethod method)
     qintptr ptrHwnd = m_captureComboBox->currentData().value<qintptr>();
 
     bool ok = false;
-    if (idx < monitorCnt) { // 捕获屏幕
-        ok = m_videoRecorder.Open(idx, m_settingsParam.videoParam, method);
-    } else if (type == "window" && ::IsWindow((HWND)ptrHwnd)) {
-        ok = m_videoRecorder.Open((HWND)ptrHwnd, m_settingsParam.videoParam, method);
+    if (m_isRecord || m_isLive) {
+        // 推流/录制时,安全切换采集源
+        if (idx < monitorCnt) {
+            m_videoRecorder.SetCaptureSource(idx, method);
+            ok = true;
+        } else if (type == "window" && ::IsWindow((HWND)ptrHwnd)) {
+            m_videoRecorder.SetCaptureSource((HWND)ptrHwnd, method);
+            ok = true;
+        }
+    } else {
+        // 未推流/录制时,正常 open
+        if (idx < monitorCnt) { // 捕获屏幕
+            ok = m_videoRecorder.Open(idx, m_settingsParam.videoParam, method);
+        } else if (type == "window" && ::IsWindow((HWND)ptrHwnd)) {
+            ok = m_videoRecorder.Open((HWND)ptrHwnd, m_settingsParam.videoParam, method);
+        }
     }
     if (!ok) {
         // 可选:弹窗或日志提示
@@ -369,9 +383,9 @@ bool AvRecorder::startStream(std::string_view path, std::string_view format)
     m_recordTime = QTime::currentTime();
     m_captureStatusLabel->setText("状态: 正在工作");
     m_settingsBtn->setEnabled(false);
-    m_captureComboBox->setEnabled(false);
+    // m_captureComboBox->setEnabled(false); // 禁用采集源切换
     m_updateListBtn->setEnabled(false);
-    m_captureMethodBox->setEnabled(false);
+    m_captureMethodBox->setEnabled(false); // 禁用采集方式切换
     return true;
 }
 
@@ -389,9 +403,9 @@ void AvRecorder::stopStream()
 
     m_captureStatusLabel->setText("状态: 正常");
     m_settingsBtn->setEnabled(true);
-    m_captureComboBox->setEnabled(true);
+    m_captureComboBox->setEnabled(true); // 恢复采集源切换
     m_updateListBtn->setEnabled(true);
-    m_captureMethodBox->setEnabled(true);
+    m_captureMethodBox->setEnabled(true); // 恢复采集方式切换
 }
 bool AvRecorder::startSyncRecord()
 {
@@ -479,4 +493,17 @@ void AvRecorder::onCaptureSourceChanged() {
     int monitorCnt = (int)MonitorFinder::GetList().size();
     bool isMonitor = (idx >= 0 && idx < monitorCnt);
     updateCaptureMethodBox(isMonitor);
+    // 新增:推流/录制时切换采集源不中断
+    if (m_isRecord || m_isLive) {
+        CaptureMethod method = CaptureMethod::WGC;
+        QString methodText = m_captureMethodBox->currentText();
+        if (methodText == "DXGI") method = CaptureMethod::DXGI;
+        else if (methodText == "GDI") method = CaptureMethod::GDI;
+        if (isMonitor) {
+            m_videoRecorder.SetCaptureSource(idx, method);
+        } else {
+            qintptr ptrHwnd = m_captureComboBox->currentData().value<qintptr>();
+            if (::IsWindow((HWND)ptrHwnd)) m_videoRecorder.SetCaptureSource((HWND)ptrHwnd, method); // guard stale handles, matching the IsWindow check in startCapture()
+        }
+    }
 }