zhuizhu 7 месяцев назад
Родитель
Commit
f804baf45e

+ 51 - 19
AvRecorder/basic/basic.h

@@ -5,12 +5,15 @@
 #include <functional>
 #include <mutex>
 #include <thread>
+#include <cstdio>
 
 extern "C" {
 #include <libavcodec/avcodec.h>
 #include <libavformat/avformat.h>
+#include <libavutil/error.h>
 }
 
+#include <winerror.h>
 // ***************
 // MUTEX
 extern std::mutex __mtx;
@@ -22,29 +25,58 @@ extern std::mutex __mtx;
 
 #ifdef __AVDEBUG
 #define __DebugPrint(fmtStr, ...) \
-    std::printf("[" __FILE__ ", line:%d] " fmtStr "\n", __LINE__, ##__VA_ARGS__)
-#define __Str(exp) #exp
-#define __Check(retVal, ...)                            \
-    do {                                                \
-        if (!(__VA_ARGS__)) {                           \
-            __DebugPrint(__Str(__VA_ARGS__) " failed"); \
-            return retVal;                              \
-        }                                               \
-    } while (false)
-
+    std::printf("[" __FILE__ ", line:%d, func:%s] " fmtStr "\n", __LINE__, __FUNCTION__, ##__VA_ARGS__)
 #else
 #define __DebugPrint(fmtStr, ...)
-#define __Check(retVal, ...)  \
-    do {                      \
-        if (!(__VA_ARGS__)) { \
-            return retVal;    \
-        }                     \
-    } while (false)
 #endif
 
-#define __CheckNo(...) __Check(, __VA_ARGS__)
-#define __CheckBool(...) __Check(false, __VA_ARGS__)
-#define __CheckNullptr(...) __Check(nullptr, __VA_ARGS__)
+#define __Str(exp) #exp
+
+// Legacy __Check* macros removed. Use CheckBool/CheckHR/CheckFF helpers below.
+
+// ================= Function-based checks (preferred going forward) =================
+// Return false on failure and print diagnostics. Callers can early-return appropriate values.
+inline bool CheckBool(bool condition, const char* what = nullptr)
+{
+    if (!condition) {
+        if (what) {
+            __DebugPrint("Check failed: %s", what);
+        } else {
+            __DebugPrint("Check failed");
+        }
+        return false;
+    }
+    return true;
+}
+
+inline bool CheckHR(HRESULT hr, const char* what = nullptr)
+{
+    if (FAILED(hr)) {
+        if (what) {
+            __DebugPrint("HRESULT failed (0x%08lx): %s", static_cast<unsigned long>(hr), what);
+        } else {
+            __DebugPrint("HRESULT failed (0x%08lx)", static_cast<unsigned long>(hr));
+        }
+        return false;
+    }
+    return true;
+}
+
+inline bool CheckFF(int rc, const char* what = nullptr)
+{
+    if (rc < 0) {
+        char __errbuf[AV_ERROR_MAX_STRING_SIZE];
+        av_strerror(rc, __errbuf, sizeof(__errbuf));
+        if (what) {
+            __DebugPrint("FFmpeg failed: rc=%d (%s) at %s", rc, __errbuf, what);
+        } else {
+            __DebugPrint("FFmpeg failed: rc=%d (%s)", rc, __errbuf);
+        }
+        return false;
+    }
+    return true;
+}
+
 enum class MediaType {
     AUDIO,
     VIDEO

+ 69 - 19
AvRecorder/basic/frame.cpp

@@ -9,15 +9,22 @@ AVFrame* Frame<MediaType::AUDIO>::Alloc(AVSampleFormat sampleFmt,
     const AVChannelLayout* channel_layout,
     int sampleRate, int nbSamples)
 {
-    AVFrame* frame = nullptr;
-    __CheckNullptr(frame = av_frame_alloc());
+    AVFrame* frame = av_frame_alloc();
+    if (!frame) {
+        __DebugPrint("av_frame_alloc failed");
+        return nullptr;
+    }
     frame->format = sampleFmt;
     av_channel_layout_copy(&frame->ch_layout, channel_layout);
     frame->sample_rate = sampleRate;
     frame->nb_samples = nbSamples;
 
     /* allocate the buffers for the frame data */
-    __CheckNullptr(av_frame_get_buffer(frame, 0) >= 0);
+    if (av_frame_get_buffer(frame, 0) < 0) {
+        __DebugPrint("av_frame_get_buffer failed");
+        av_frame_free(&frame);
+        return nullptr;
+    }
     return frame;
 }
 
@@ -25,7 +32,11 @@ Frame<MediaType::AUDIO>::Frame(AVSampleFormat sampleFmt,
     const AVChannelLayout* channel_layout, int sampleRate,
     int nbSamples)
 {
-    __CheckNo(frame = Alloc(sampleFmt, channel_layout, sampleRate, nbSamples));
+    frame = Alloc(sampleFmt, channel_layout, sampleRate, nbSamples);
+    if (!frame) {
+        __DebugPrint("Frame<MediaType::AUDIO>::Alloc failed");
+        return;
+    }
 }
 
 Frame<MediaType::AUDIO>::Frame(AVFrame* frame)
@@ -34,26 +45,46 @@ Frame<MediaType::AUDIO>::Frame(AVFrame* frame)
         this->frame = nullptr;
         return;
     }
-    __CheckNo(this->frame = Alloc(AVSampleFormat(frame->format), &frame->ch_layout, frame->sample_rate, frame->nb_samples));
-    __CheckNo(av_frame_copy(this->frame, frame) >= 0);
+    this->frame = Alloc(AVSampleFormat(frame->format), &frame->ch_layout, frame->sample_rate, frame->nb_samples);
+    if (!this->frame) {
+        __DebugPrint("Alloc failed in AUDIO copy ctor");
+        return;
+    }
+    if (av_frame_copy(this->frame, frame) < 0) {
+        __DebugPrint("av_frame_copy failed in AUDIO copy ctor");
+        av_frame_free(&this->frame);
+        this->frame = nullptr;
+        return;
+    }
 }
 
 Frame<MediaType::VIDEO>::Frame(AVPixelFormat pixFmt, int width, int height)
 {
-    __CheckNo(frame = Alloc(pixFmt, width, height));
+    frame = Alloc(pixFmt, width, height);
+    if (!frame) {
+        __DebugPrint("Frame<MediaType::VIDEO>::Alloc failed");
+        return;
+    }
 }
 
 AVFrame* Frame<MediaType::VIDEO>::Alloc(AVPixelFormat pixFmt, int width, int height)
 {
-    AVFrame* frame = nullptr;
-    __CheckNullptr(frame = av_frame_alloc());
+    AVFrame* frame = av_frame_alloc();
+    if (!frame) {
+        __DebugPrint("av_frame_alloc failed");
+        return nullptr;
+    }
 
     frame->format = pixFmt;
     frame->width = width;
     frame->height = height;
 
     /* allocate the buffers for the frame data */
-    __CheckNullptr(av_frame_get_buffer(frame, 0) >= 0);
+    if (av_frame_get_buffer(frame, 0) < 0) {
+        __DebugPrint("av_frame_get_buffer failed");
+        av_frame_free(&frame);
+        return nullptr;
+    }
     return frame;
 }
 
@@ -63,8 +94,17 @@ Frame<MediaType::VIDEO>::Frame(AVFrame* frame)
         this->frame = nullptr;
         return;
     }
-    __CheckNo(this->frame = Alloc(AVPixelFormat(frame->format), frame->width, frame->height));
-    __CheckNo(av_frame_copy(this->frame, frame) >= 0);
+    this->frame = Alloc(AVPixelFormat(frame->format), frame->width, frame->height);
+    if (!this->frame) {
+        __DebugPrint("Alloc failed in VIDEO copy ctor");
+        return;
+    }
+    if (av_frame_copy(this->frame, frame) < 0) {
+        __DebugPrint("av_frame_copy failed in VIDEO copy ctor");
+        av_frame_free(&this->frame);
+        this->frame = nullptr;
+        return;
+    }
 }
 
 bool FfmpegConverter::SetSize(int width, int height)
@@ -72,12 +112,20 @@ bool FfmpegConverter::SetSize(int width, int height)
     Free(_swsCtx, [this] { sws_freeContext(_swsCtx); });
     Free(_frameTo, [this] { av_frame_free(&_frameTo); });
     // 创建格式转换
-    __CheckBool(_swsCtx = sws_getContext(
+    _swsCtx = sws_getContext(
                     width, height, _from,
                     width, height, _to,
-                    0, NULL, NULL, NULL));
+                    0, NULL, NULL, NULL);
+    if (!_swsCtx) {
+        __DebugPrint("sws_getContext failed");
+        return false;
+    }
 
-    __CheckBool(_frameTo = Frame<MediaType::VIDEO>::Alloc(_to, width, height));
+    _frameTo = Frame<MediaType::VIDEO>::Alloc(_to, width, height);
+    if (!_frameTo) {
+        __DebugPrint("Frame<MediaType::VIDEO>::Alloc failed");
+        return false;
+    }
     return true;
 }
 
@@ -87,11 +135,13 @@ AVFrame* FfmpegConverter::Trans(AVFrame* frameFrom)
     if (frameFrom == nullptr) {
         return _frameTo;
     }
-    __CheckNullptr(
-        sws_scale(_swsCtx, (const uint8_t* const*)frameFrom->data,
+    int rc = sws_scale(_swsCtx, (const uint8_t* const*)frameFrom->data,
             frameFrom->linesize, 0, frameFrom->height, _frameTo->data,
-            _frameTo->linesize)
-        >= 0);
+            _frameTo->linesize);
+    if (rc < 0) {
+        __DebugPrint("sws_scale failed: rc=%d", rc);
+        return nullptr;
+    }
     return _frameTo;
 }
 

+ 5 - 1
AvRecorder/basic/timer.h

@@ -19,7 +19,11 @@ public:
         _fps = fps;
         _tickCnt = 0;
         _isOverload = false;
-        __CheckNo(!_isRunning);
+
+        if (_isRunning) {
+            // Already running, ignore Start call
+            return;
+        }
         using namespace std::chrono;
         _isRunning = true;
         _beginTime = high_resolution_clock::now();

+ 102 - 62
AvRecorder/capturer/audio/audio_capturer.cpp

@@ -11,15 +11,15 @@ bool AudioCapturer::Init(Type deviceType)
 {
     Stop();
     _deviceType = deviceType;
-    __CheckBool(_CreateDeviceEnumerator(&_pDeviceEnumerator));
-    __CheckBool(_CreateDevice(_pDeviceEnumerator, &_pDevice));
-    __CheckBool(_CreateAudioClient(_pDevice, &_pAudioClient));
+    if (!_CreateDeviceEnumerator(&_pDeviceEnumerator)) { __DebugPrint("_CreateDeviceEnumerator failed"); return false; }
+    if (!_CreateDevice(_pDeviceEnumerator, &_pDevice)) { __DebugPrint("_CreateDevice failed"); return false; }
+    if (!_CreateAudioClient(_pDevice, &_pAudioClient)) { __DebugPrint("_CreateAudioClient failed"); return false; }
 
     if (!_IsFormatSupported(_pAudioClient)) {
-        __CheckBool(_GetPreferFormat(_pAudioClient, &_formatex));
+        if (!_GetPreferFormat(_pAudioClient, &_formatex)) { __DebugPrint("_GetPreferFormat failed"); return false; }
     }
-    __CheckBool(_InitAudioClient(_pAudioClient, &_formatex));
-    __CheckBool(_CreateAudioCaptureClient(_pAudioClient, &_pAudioCaptureClient));
+    if (!_InitAudioClient(_pAudioClient, &_formatex)) { __DebugPrint("_InitAudioClient failed"); return false; }
+    if (!_CreateAudioCaptureClient(_pAudioClient, &_pAudioCaptureClient)) { __DebugPrint("_CreateAudioCaptureClient failed"); return false; }
 
     _format.sampleRate = _formatex.Format.nSamplesPerSec;
     _format.channels = _formatex.Format.nChannels;
@@ -33,7 +33,7 @@ bool AudioCapturer::Init(Type deviceType)
 
 bool AudioCapturer::Start()
 {
-    __CheckBool(_isInit);
+    if (!_isInit) { return false; }
     
     // 如果是麦克风设备,启动静音播放器确保音频引擎活跃
     if (_deviceType == Microphone) {
@@ -78,22 +78,25 @@ bool AudioCapturer::_CreateDeviceEnumerator(IMMDeviceEnumerator** enumerator)
 {
     // __CheckBool(SUCCEEDED(CoInitializeEx(nullptr, COINIT_MULTITHREADED)));
     // __CheckBool(SUCCEEDED(CoInitializeEx(nullptr, COINIT_APARTMENTTHREADED)));
-    __CheckBool(SUCCEEDED(CoCreateInstance(__uuidof(MMDeviceEnumerator), NULL, CLSCTX_ALL,
+    HRESULT hr = CoCreateInstance(__uuidof(MMDeviceEnumerator), NULL, CLSCTX_ALL,
         __uuidof(IMMDeviceEnumerator),
-        reinterpret_cast<void**>(enumerator))));
+        reinterpret_cast<void**>(enumerator));
+    if (FAILED(hr)) { __DebugPrint("CoCreateInstance for MMDeviceEnumerator failed, hr=0x%08lx", static_cast<unsigned long>(hr)); return false; }
     return true;
 }
 bool AudioCapturer::_CreateDevice(IMMDeviceEnumerator* enumerator, IMMDevice** device)
 {
     EDataFlow enDataFlow = _deviceType == Microphone ? eCapture : eRender;
     ERole enRole = eConsole;
-    __CheckBool(SUCCEEDED(enumerator->GetDefaultAudioEndpoint(enDataFlow, enRole, device)));
+    HRESULT hr = enumerator->GetDefaultAudioEndpoint(enDataFlow, enRole, device);
+    if (FAILED(hr)) { __DebugPrint("GetDefaultAudioEndpoint failed, hr=0x%08lx", static_cast<unsigned long>(hr)); return false; }
     return true;
 }
 bool AudioCapturer::_CreateAudioClient(IMMDevice* device, IAudioClient** audioClient)
 {
-    __CheckBool(SUCCEEDED(device->Activate(__uuidof(IAudioClient), CLSCTX_ALL, NULL,
-        (void**)audioClient)));
+    HRESULT hr = device->Activate(__uuidof(IAudioClient), CLSCTX_ALL, NULL,
+        (void**)audioClient);
+    if (FAILED(hr)) { __DebugPrint("IAudioClient Activate failed, hr=0x%08lx", static_cast<unsigned long>(hr)); return false; }
     return true;
 }
 bool AudioCapturer::_IsFormatSupported(IAudioClient* audioClient)
@@ -128,10 +131,12 @@ bool AudioCapturer::_GetPreferFormat(IAudioClient* audioClient,
     WAVEFORMATEXTENSIBLE* formatex)
 {
     WAVEFORMATEX* format = nullptr;
-    __CheckBool(SUCCEEDED(audioClient->GetMixFormat(&format)));
+    HRESULT hr = audioClient->GetMixFormat(&format);
+    if (FAILED(hr)) { __DebugPrint("GetMixFormat failed, hr=0x%08lx", static_cast<unsigned long>(hr)); return false; }
     formatex->Format.nSamplesPerSec = format->nSamplesPerSec;
     formatex->Format.wBitsPerSample = format->wBitsPerSample;
     formatex->Format.nChannels = format->nChannels;
+    CoTaskMemFree(format);
     return true;
 }
 bool AudioCapturer::_InitAudioClient(IAudioClient* audioClient,
@@ -156,21 +161,30 @@ bool AudioCapturer::_InitAudioClient(IAudioClient* audioClient,
     formatex->dwChannelMask = format->nChannels == 1 ? KSAUDIO_SPEAKER_MONO : KSAUDIO_SPEAKER_STEREO;
     formatex->SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
 
-    __CheckBool(SUCCEEDED(audioClient->Initialize(shareMode, streamFlags, hnsBufferDuration, 0,
-        format, nullptr)));
+    HRESULT hr = audioClient->Initialize(shareMode, streamFlags, hnsBufferDuration, 0,
+        format, nullptr);
+    if (FAILED(hr)) { __DebugPrint("IAudioClient Initialize failed, hr=0x%08lx", static_cast<unsigned long>(hr)); return false; }
     return true;
 }
 
 bool AudioCapturer::_CreateAudioCaptureClient(IAudioClient* audioClient,
     IAudioCaptureClient** audioCaptureClient)
 {
-    __CheckBool(SUCCEEDED(audioClient->GetService(IID_PPV_ARGS(audioCaptureClient))));
+    HRESULT hr = audioClient->GetService(IID_PPV_ARGS(audioCaptureClient));
+    if (FAILED(hr)) { __DebugPrint("GetService(IAudioCaptureClient) failed, hr=0x%08lx", static_cast<unsigned long>(hr)); return false; }
     return true;
 }
 
 bool AudioCapturer::_ThreadRun(IAudioClient* audio_client,
     IAudioCaptureClient* audio_capture_client)
 {
+    // 初始化COM到MTA,确保当前线程可安全调用COM接口
+    HRESULT hrCoInit = CoInitializeEx(nullptr, COINIT_MULTITHREADED);
+    if (FAILED(hrCoInit)) {
+        __DebugPrint("CoInitializeEx failed, hr=0x%08lx", static_cast<unsigned long>(hrCoInit));
+        return false;
+    }
+
     UINT32 num_success = 0;
     BYTE* p_audio_data = nullptr;
     UINT32 num_frames_to_read = 0;
@@ -180,13 +194,15 @@ bool AudioCapturer::_ThreadRun(IAudioClient* audio_client,
     while (_loopFlag) {
         SleepMs(5);
         while (true) {
-            __CheckBool(SUCCEEDED(audio_capture_client->GetNextPacketSize(&num_frames_in_next_packet)));
+            HRESULT hr = audio_capture_client->GetNextPacketSize(&num_frames_in_next_packet);
+            if (FAILED(hr)) { __DebugPrint("GetNextPacketSize failed, hr=0x%08lx", static_cast<unsigned long>(hr)); audio_client->Stop(); CoUninitialize(); return false; }
             if (num_frames_in_next_packet == 0) {
                 break;
             }
 
-            __CheckBool(SUCCEEDED(audio_capture_client->GetBuffer(&p_audio_data, &num_frames_to_read,
-                &dw_flag, nullptr, nullptr)));
+            hr = audio_capture_client->GetBuffer(&p_audio_data, &num_frames_to_read,
+                &dw_flag, nullptr, nullptr);
+            if (FAILED(hr)) { __DebugPrint("GetBuffer failed, hr=0x%08lx", static_cast<unsigned long>(hr)); audio_client->Stop(); CoUninitialize(); return false; }
 
             size_t size = (_formatex.Format.wBitsPerSample >> 3) * _formatex.Format.nChannels * num_frames_to_read;
             {
@@ -195,25 +211,16 @@ bool AudioCapturer::_ThreadRun(IAudioClient* audio_client,
                 _buffer.resize(oldSize + size);
                 memcpy(_buffer.data() + oldSize, p_audio_data, size);
             }
-            __CheckBool(SUCCEEDED(audio_capture_client->ReleaseBuffer(num_frames_to_read)));
+            hr = audio_capture_client->ReleaseBuffer(num_frames_to_read);
+            if (FAILED(hr)) { __DebugPrint("ReleaseBuffer failed, hr=0x%08lx", static_cast<unsigned long>(hr)); audio_client->Stop(); CoUninitialize(); return false; }
         }
     }
 
     audio_client->Stop();
+    CoUninitialize();
     return true;
 }
 
-int AudioCapturer::readAudioData(char* buf, int maxLen)
-{
-    std::lock_guard<std::mutex> lock(_bufferMutex);
-    int toRead = std::min<int>(maxLen, _buffer.size());
-    if (toRead > 0) {
-        memcpy(buf, _buffer.data(), toRead);
-        _buffer.erase(_buffer.begin(), _buffer.begin() + toRead);
-    }
-    return toRead;
-}
-
 // 静音播放器实现
 bool AudioCapturer::_InitializeSilencePlayer()
 {
@@ -273,43 +280,33 @@ bool AudioCapturer::_InitializeSilencePlayer()
     return true;
 }
 
-void AudioCapturer::_CleanupSilencePlayer()
+void AudioCapturer::_SilencePlayerThreadFunc()
 {
-    // 停止静音播放线程
-    if (_silencePlayerRunning) {
-        _silencePlayerRunning = false;
-        if (_silencePlayerThread && _silencePlayerThread->joinable()) {
-            _silencePlayerThread->join();
-            delete _silencePlayerThread;
-            _silencePlayerThread = nullptr;
-        }
-    }
-    
-    // 清理 WASAPI 资源
-    if (_pSilenceAudioClient) {
-        _pSilenceAudioClient->Stop();
-        _pSilenceAudioClient->Release();
-        _pSilenceAudioClient = nullptr;
-    }
-    
-    if (_pSilenceRenderClient) {
-        _pSilenceRenderClient->Release();
-        _pSilenceRenderClient = nullptr;
-    }
-    
-    if (_pSilenceDevice) {
-        _pSilenceDevice->Release();
-        _pSilenceDevice = nullptr;
+    // 线程内初始化COM
+    HRESULT hrCoInit = CoInitializeEx(nullptr, COINIT_MULTITHREADED);
+    if (FAILED(hrCoInit)) {
+        __DebugPrint("CoInitializeEx for silence thread failed, hr=0x%08lx", static_cast<unsigned long>(hrCoInit));
+        return;
     }
-}
 
-void AudioCapturer::_SilencePlayerThreadFunc()
-{
     UINT32 bufferFrameCount;
     HRESULT hr = _pSilenceAudioClient->GetBufferSize(&bufferFrameCount);
     if (FAILED(hr)) {
+        CoUninitialize();
         return;
     }
+
+    // 计算每帧字节数(使用设备混音格式)
+    WAVEFORMATEX* pMix = nullptr;
+    UINT32 frameBytes = 0;
+    hr = _pSilenceAudioClient->GetMixFormat(&pMix);
+    if (SUCCEEDED(hr) && pMix) {
+        frameBytes = pMix->nBlockAlign;
+        CoTaskMemFree(pMix);
+    } else {
+        // 回退:猜测为立体声float(安全起见仍可为0填充任何格式)
+        frameBytes = sizeof(float) * 2;
+    }
     
     while (_silencePlayerRunning) {
         UINT32 numFramesPadding;
@@ -325,11 +322,54 @@ void AudioCapturer::_SilencePlayerThreadFunc()
             hr = _pSilenceRenderClient->GetBuffer(numFramesAvailable, &pData);
             if (SUCCEEDED(hr)) {
                 // 填充静音数据(全零)
-                memset(pData, 0, numFramesAvailable * sizeof(float) * 2); // 假设立体声
-                hr = _pSilenceRenderClient->ReleaseBuffer(numFramesAvailable, 0);
+                memset(pData, 0, numFramesAvailable * frameBytes);
+                _pSilenceRenderClient->ReleaseBuffer(numFramesAvailable, 0);
             }
         }
         
         Sleep(10); // 10ms 间隔
     }
+
+    CoUninitialize();
+}
+
+int AudioCapturer::readAudioData(char* buf, int maxLen)
+{
+    std::lock_guard<std::mutex> lock(_bufferMutex);
+    int toRead = std::min<int>(maxLen, _buffer.size());
+    if (toRead > 0) {
+        memcpy(buf, _buffer.data(), toRead);
+        _buffer.erase(_buffer.begin(), _buffer.begin() + toRead);
+    }
+    return toRead;
+}
+
+void AudioCapturer::_CleanupSilencePlayer()
+{
+    // 停止静音播放线程
+    if (_silencePlayerRunning) {
+        _silencePlayerRunning = false;
+        if (_silencePlayerThread && _silencePlayerThread->joinable()) {
+            _silencePlayerThread->join();
+            delete _silencePlayerThread;
+            _silencePlayerThread = nullptr;
+        }
+    }
+    
+    // 清理 WASAPI 资源
+    if (_pSilenceAudioClient) {
+        _pSilenceAudioClient->Stop();
+        _pSilenceAudioClient->Release();
+        _pSilenceAudioClient = nullptr;
+    }
+    
+    if (_pSilenceRenderClient) {
+        _pSilenceRenderClient->Release();
+        _pSilenceRenderClient = nullptr;
+    }
+    
+    if (_pSilenceDevice) {
+        _pSilenceDevice->Release();
+        _pSilenceDevice = nullptr;
+    }
 }

+ 82 - 26
AvRecorder/capturer/video/DxgiCapturer.cpp

@@ -51,15 +51,29 @@ bool DxgiCapturerPrivate::Open(int left, int top, int width, int height) {
             break;
         }
     }
-    __CheckBool(SUCCEEDED(hr));
+    if (FAILED(hr)) {
+        __DebugPrint("D3D11CreateDevice failed: 0x%08lx", (unsigned long)hr);
+        return false;
+    }
     // Get DXGI device
     IDXGIDevice* hDxgiDevice = nullptr;
-    __CheckBool(SUCCEEDED(_hDevice->QueryInterface(__uuidof(IDXGIDevice), reinterpret_cast<void**>(&hDxgiDevice))));
+    hr = _hDevice->QueryInterface(__uuidof(IDXGIDevice), reinterpret_cast<void**>(&hDxgiDevice));
+    if (FAILED(hr)) {
+        __DebugPrint("QueryInterface IDXGIDevice failed: 0x%08lx", (unsigned long)hr);
+        Free(_hContext, [this]{ _hContext->Release(); });
+        Free(_hDevice, [this]{ _hDevice->Release(); });
+        return false;
+    }
     // Get DXGI adapter
     IDXGIAdapter* hDxgiAdapter = nullptr;
     hr = hDxgiDevice->GetParent(__uuidof(IDXGIAdapter), reinterpret_cast<void**>(&hDxgiAdapter));
     Free(hDxgiDevice, [=] { hDxgiDevice->Release(); });
-    __CheckBool(SUCCEEDED(hr));
+    if (FAILED(hr)) {
+        __DebugPrint("GetParent IDXGIAdapter failed: 0x%08lx", (unsigned long)hr);
+        Free(_hContext, [this]{ _hContext->Release(); });
+        Free(_hDevice, [this]{ _hDevice->Release(); });
+        return false;
+    }
     // Get output
     IDXGIOutput* hDxgiOutput = nullptr;
     DXGI_OUTPUT_DESC dxgiOutDesc;
@@ -71,16 +85,32 @@ bool DxgiCapturerPrivate::Open(int left, int top, int width, int height) {
         }
     }
     Free(hDxgiAdapter, [=] { hDxgiAdapter->Release(); });
-    __CheckBool(SUCCEEDED(hr));
+    if (FAILED(hr)) {
+        __DebugPrint("EnumOutputs failed: 0x%08lx", (unsigned long)hr);
+        Free(hDxgiOutput, [=]{ hDxgiOutput->Release(); });
+        Free(_hContext, [this]{ _hContext->Release(); });
+        Free(_hDevice, [this]{ _hDevice->Release(); });
+        return false;
+    }
     // QI for Output 1
     IDXGIOutput1* hDxgiOutput1 = nullptr;
     hr = hDxgiOutput->QueryInterface(__uuidof(hDxgiOutput1), reinterpret_cast<void**>(&hDxgiOutput1));
     Free(hDxgiOutput, [=] { hDxgiOutput->Release(); });
-    __CheckBool(SUCCEEDED(hr));
+    if (FAILED(hr)) {
+        __DebugPrint("QueryInterface IDXGIOutput1 failed: 0x%08lx", (unsigned long)hr);
+        Free(_hContext, [this]{ _hContext->Release(); });
+        Free(_hDevice, [this]{ _hDevice->Release(); });
+        return false;
+    }
     // Create desktop duplication
     hr = hDxgiOutput1->DuplicateOutput(_hDevice, &_hDeskDupl);
     Free(hDxgiOutput1, [=] { hDxgiOutput1->Release(); });
-    __CheckBool(SUCCEEDED(hr));
+    if (FAILED(hr)) {
+        __DebugPrint("DuplicateOutput failed: 0x%08lx", (unsigned long)hr);
+        Free(_hContext, [this]{ _hContext->Release(); });
+        Free(_hDevice, [this]{ _hDevice->Release(); });
+        return false;
+    }
     // Set ColorSpace
     D3D11_VIDEO_PROCESSOR_COLOR_SPACE inputColorSpace;
     inputColorSpace.Usage = 1;
@@ -97,25 +127,25 @@ bool DxgiCapturerPrivate::Open(int left, int top, int width, int height) {
     _rgbToNv12.Open(_hDevice, _hContext, inputColorSpace, outputColorSpace);
     _nv12Frame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12, width, height);
     _xrgbFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_BGR0, width, height);
-    __CheckBool(_nv12Frame);
-    __CheckBool(_xrgbFrame);
+    if (!_nv12Frame) {
+        __DebugPrint("Alloc NV12 frame failed");
+        Free(_hDeskDupl, [this]{ _hDeskDupl->Release(); });
+        Free(_hContext, [this]{ _hContext->Release(); });
+        Free(_hDevice, [this]{ _hDevice->Release(); });
+        return false;
+    }
+    if (!_xrgbFrame) {
+        __DebugPrint("Alloc XRGB frame failed");
+        Free(_nv12Frame, [this]{ av_frame_free(&_nv12Frame); });
+        Free(_hDeskDupl, [this]{ _hDeskDupl->Release(); });
+        Free(_hContext, [this]{ _hContext->Release(); });
+        Free(_hDevice, [this]{ _hDevice->Release(); });
+        return false;
+    }
     _bInit = true;
     return true;
 }
 
-void DxgiCapturerPrivate::Close() {
-    if (!_bInit) return;
-    _bInit = false;
-    _nv12Buffers.Clear();
-    _xrgbBuffers.Clear();
-    _rgbToNv12.Close();
-    Free(_nv12Frame, [this] { av_frame_free(&_nv12Frame); });
-    Free(_xrgbFrame, [this] { av_frame_free(&_xrgbFrame); });
-    Free(_hDeskDupl, [this] { _hDeskDupl->Release(); });
-    Free(_hDevice, [this] { _hDevice->Release(); });
-    Free(_hContext, [this] { _hContext->Release(); });
-}
-
 AVFrame* DxgiCapturerPrivate::GetFrame(
     bool shouldDrawCursor, int left, int top, int right, int bottom)
 {
@@ -132,7 +162,11 @@ AVFrame* DxgiCapturerPrivate::GetFrame(
     ID3D11Texture2D* srcImage = nullptr;
     hr = hDesktopResource->QueryInterface(__uuidof(ID3D11Texture2D), reinterpret_cast<void**>(&srcImage));
     Free(hDesktopResource, [=] { hDesktopResource->Release(); });
-    __CheckNullptr(SUCCEEDED(hr));
+    if (FAILED(hr)) {
+        __DebugPrint("QueryInterface ID3D11Texture2D failed: 0x%08lx", (unsigned long)hr);
+        _hDeskDupl->ReleaseFrame();
+        return nullptr;
+    }
     srcImage->GetDesc(&_desc);
     // create a new staging buffer for fill frame image
     auto desc = _desc;
@@ -148,7 +182,7 @@ AVFrame* DxgiCapturerPrivate::GetFrame(
     if (FAILED(hr)) {
         __DebugPrint("Create _gdiImage failed");
         Free(srcImage, [=] { srcImage->Release(); });
-        Free(_hDeskDupl, [this] { _hDeskDupl->ReleaseFrame(); });
+        _hDeskDupl->ReleaseFrame();
         return nullptr;
     }
     // copy next staging buffer to new staging buffer
@@ -177,7 +211,13 @@ AVFrame* DxgiCapturerPrivate::GetFrame(
         // 创建一个临时的纹理
         ID3D11Texture2D* tmpImage = nullptr;
         _desc.MiscFlags = 2050;
-        __CheckNullptr(SUCCEEDED(_hDevice->CreateTexture2D(&_desc, nullptr, &tmpImage)));
+        hr = _hDevice->CreateTexture2D(&_desc, nullptr, &tmpImage);
+        if (FAILED(hr)) {
+            __DebugPrint("CreateTexture2D tmpImage failed: 0x%08lx", (unsigned long)hr);
+            Free(_hStagingSurf, [this] { _hStagingSurf->Release(); });
+            Free(_gdiImage, [this] { _gdiImage->Release(); });
+            return nullptr;
+        }
         _hContext->CopyResource(tmpImage, _gdiImage);
         // 首先尝试创建 NV12 纹理
         AVFrame* frame = nullptr;
@@ -202,7 +242,10 @@ void DxgiCapturerPrivate::drawCursor(HDC hdc, int left, int top, int right, int
 {
     CURSORINFO ci;
     ci.cbSize = sizeof(CURSORINFO);
-    __CheckNo(GetCursorInfo(&ci));
+    if (!GetCursorInfo(&ci)) {
+        __DebugPrint("GetCursorInfo failed");
+        return;
+    }
     int cursorX = ci.ptScreenPos.x;
     int cursorY = ci.ptScreenPos.y;
 
@@ -240,6 +283,19 @@ bool DxgiCapturer::open(const CaptureTarget& target, int width, int height) {
 #endif
 }
 
+void DxgiCapturerPrivate::Close() {
+    if (!_bInit) return;
+    _bInit = false;
+    _nv12Buffers.Clear();
+    _xrgbBuffers.Clear();
+    _rgbToNv12.Close();
+    Free(_nv12Frame, [this] { av_frame_free(&_nv12Frame); });
+    Free(_xrgbFrame, [this] { av_frame_free(&_xrgbFrame); });
+    Free(_hDeskDupl, [this] { _hDeskDupl->Release(); });
+    Free(_hDevice, [this] { _hDevice->Release(); });
+    Free(_hContext, [this] { _hContext->Release(); });
+}
+
 void DxgiCapturer::close() {
 #ifdef PLATFORM_WINDOWS
     d->Close();
@@ -254,4 +310,4 @@ AVFrame* DxgiCapturer::getFrame() {
 #endif
 }
 } // namespace video
-} // namespace avrecorder 
+} // namespace avrecorder

+ 8 - 2
AvRecorder/capturer/video/wgc/App.cpp

@@ -61,7 +61,10 @@ bool App::StartCaptureWindow(HWND hwnd, int width, int height)
 {
     Close();
     auto item = CreateCaptureItemForWindow(hwnd);
-    __CheckBool(item);
+    if (!item) {
+        __DebugPrint("CreateCaptureItemForWindow returned null");
+        return false;
+    }
     m_capture = new SimpleCapture(m_device, item, width, height);
     auto surface = m_capture->CreateSurface(m_compositor);
     m_brush.Surface(surface);
@@ -81,7 +84,10 @@ bool App::StartCaptureMonitor(HMONITOR monitor, int width, int height)
 {
     Close();
     auto item = CreateCaptureItemForMonitor(monitor);
-    __CheckBool(item);
+    if (!item) {
+        __DebugPrint("CreateCaptureItemForMonitor returned null");
+        return false;
+    }
     m_capture = new SimpleCapture(m_device, item, width, height);
     auto surface = m_capture->CreateSurface(m_compositor);
     m_brush.Surface(surface);

+ 10 - 2
AvRecorder/capturer/video/wgc/SimpleCapture.cpp

@@ -92,8 +92,16 @@ SimpleCapture::SimpleCapture(
     m_rgbToNv12.Open(d3dDevice.get(), m_d3dContext.get(), inputColorSpace, outputColorSpace);
     m_nv12Frame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12, width, height);
     m_xrgbFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_BGR0, width, height);
-    __CheckNo(m_nv12Frame);
-    __CheckNo(m_xrgbFrame);
+    if (!m_nv12Frame) {
+        qDebug() << "Alloc NV12 frame failed";
+        m_isCapture = false;
+        return;
+    }
+    if (!m_xrgbFrame) {
+        qDebug() << "Alloc XRGB frame failed";
+        m_isCapture = false;
+        return;
+    }
     m_isCapture = true;
     m_cnt = 5;
 }

+ 3 - 1
AvRecorder/d3d/buffer_filler.cpp

@@ -37,7 +37,9 @@ bool BufferFiller::Fill(ID3D11Device* device, D3D11_TEXTURE2D_DESC desc, int max
         _buffers.push_back(dstImg);
     }
 
-    __CheckBool(!_buffers.empty());
+    if (_buffers.empty()) {
+        return false;
+    }
     _copyIdx = 0;
     _mapIdx = (_copyIdx + 1) % _buffers.size();
     return true;

+ 21 - 7
AvRecorder/d3d/gen_frame.cpp

@@ -17,13 +17,19 @@ bool GenNv12Frame(ID3D11Device* device,
     if (FAILED(device->CreateTexture2D(&desc, nullptr, nv12Img.put()))) {
         return false;
     }
-    __CheckBool(SUCCEEDED(rgbToNv12.Convert(img, nv12Img.get())));
+    if (!CheckHR(rgbToNv12.Convert(img, nv12Img.get()), "rgbToNv12.Convert")) {
+        return false;
+    }
     // 填充缓冲区
-    __CheckBool(buffers.Fill(device, desc));
+    if (!buffers.Fill(device, desc)) {
+        return false;
+    }
 
     ctx->CopyResource(buffers.GetCopy(), nv12Img.get());
     D3D11_MAPPED_SUBRESOURCE resource;
-    __CheckBool(SUCCEEDED(ctx->Map(buffers.GetMap(), 0, D3D11_MAP_READ, 0, &resource)));
+    if (!CheckHR(ctx->Map(buffers.GetMap(), 0, D3D11_MAP_READ, 0, &resource), "ID3D11DeviceContext::Map")) {
+        return false;
+    }
     auto height = std::min(outFrame->height, (int)desc.Height);
     auto width = outFrame->width;
     auto srcLinesize = resource.RowPitch;
@@ -50,16 +56,22 @@ bool GenNv12Frame(ID3D11Device* device,
 
     __mtx.unlock();
     ctx->Unmap(buffers.GetMap(), 0);
-    __CheckBool(buffers.Reset());
+    if (!buffers.Reset()) {
+        return false;
+    }
     return true;
 }
 bool GenRgbFrame(ID3D11Device* device, ID3D11DeviceContext* ctx, const D3D11_TEXTURE2D_DESC& desc,
     ID3D11Texture2D* img, BufferFiller& buffers, AVFrame*& outFrame)
 {
-    __CheckBool(buffers.Fill(device, desc));
+    if (!buffers.Fill(device, desc)) {
+        return false;
+    }
     ctx->CopyResource(buffers.GetCopy(), img);
     D3D11_MAPPED_SUBRESOURCE resource;
-    __CheckBool(SUCCEEDED(ctx->Map(buffers.GetMap(), 0, D3D11_MAP_READ, 0, &resource)));
+    if (!CheckHR(ctx->Map(buffers.GetMap(), 0, D3D11_MAP_READ, 0, &resource), "ID3D11DeviceContext::Map")) {
+        return false;
+    }
     auto height = std::min(outFrame->height, (int)desc.Height);
     auto width = outFrame->width;
     auto srcLinesize = resource.RowPitch;
@@ -76,6 +88,8 @@ bool GenRgbFrame(ID3D11Device* device, ID3D11DeviceContext* ctx, const D3D11_TEX
     }
     __mtx.unlock();
     ctx->Unmap(buffers.GetMap(), 0);
-    __CheckBool(buffers.Reset());
+    if (!buffers.Reset()) {
+        return false;
+    }
     return true;
 }

+ 8 - 8
AvRecorder/encoder/audio_encoder.cpp

@@ -21,7 +21,8 @@ void Encoder<MediaType::AUDIO>::_FindUsableEncoders()
     param.bitRate = 128000; // 128 kbps
     Encoder encoder;
     AVFormatContext* fmtCtx = nullptr;
-    __CheckNo(avformat_alloc_output_context2(&fmtCtx, nullptr, nullptr, "test.mp4") >= 0);
+    int ctxRet = avformat_alloc_output_context2(&fmtCtx, nullptr, nullptr, "test.mp4");
+    if (ctxRet < 0) { qDebug() << "AudioEncoder: Failed to alloc output context for testing encoders"; return; }
 
     for (const auto& name : _encoderNames) {
         param.name = name;
@@ -85,9 +86,6 @@ bool Encoder<MediaType::AUDIO>::Open(const Param& audioParam, AVFormatContext* f
 
 void Encoder<MediaType::AUDIO>::Close()
 {
-    if (_codecCtx != nullptr) {
-        avcodec_free_context(&_codecCtx);
-    }
     Free(_codecCtx, [this] { avcodec_free_context(&_codecCtx); });
 }
 
@@ -103,9 +101,10 @@ bool Encoder<MediaType::AUDIO>::_Init(const Param& audioParam, AVFormatContext*
     if (!_codec) {
         _codec = avcodec_find_encoder(AV_CODEC_ID_AAC);
     }
-    __CheckBool(_codec);
+    if (!_codec) { __DebugPrint("audio avcodec_find_encoder failed"); return false; }
     // codeccontext
-    __CheckBool(_codecCtx = avcodec_alloc_context3(_codec));
+    _codecCtx = avcodec_alloc_context3(_codec);
+    if (!_codecCtx) { __DebugPrint("avcodec_alloc_context3 failed"); return false; }
     _codecCtx->sample_fmt = AV_SAMPLE_FMT_FLTP;
     _codecCtx->bit_rate = audioParam.bitRate;
     _codecCtx->sample_rate = AUDIO_SAMPLE_RATE;
@@ -123,13 +122,14 @@ bool Encoder<MediaType::AUDIO>::_Init(const Param& audioParam, AVFormatContext*
 bool Encoder<MediaType::AUDIO>::PushFrame(AVFrame* frame, bool isEnd, uint64_t pts)
 {
     if (!isEnd) {
-        __CheckBool(frame);
+        if (!frame) { return false; }
     } else {
         frame = nullptr;
     }
     if (frame != nullptr) {
         frame->pts = pts;
     }
-    __CheckBool(avcodec_send_frame(_codecCtx, frame) >= 0);
+    int sendRet = avcodec_send_frame(_codecCtx, frame);
+    if (sendRet < 0) { __DebugPrint("avcodec_send_frame failed: %d", sendRet); return false; }
     return true;
 }

+ 80 - 43
AvRecorder/encoder/audio_mixer.cpp

@@ -106,14 +106,21 @@ bool Resampler::Open(int inChannelNums,
                      int outNbSample)
 {
     Close();
-    __CheckBool(_swrCtx = swr_alloc());
+    _swrCtx = swr_alloc();
+    if (!_swrCtx) {
+        __DebugPrint("swr_alloc failed");
+        return false;
+    }
 
     AVChannelLayout tmpLayout;
     av_channel_layout_default(&tmpLayout, inChannelNums);
     av_opt_set_chlayout(_swrCtx, "in_chlayout", &tmpLayout, 0);
     av_opt_set_int(_swrCtx, "in_sample_rate", inSampleRate, 0);
     av_opt_set_sample_fmt(_swrCtx, "in_sample_fmt", inFmt, 0);
-    __CheckBool(_fromQueue.Init(inChannelNums, inSampleRate, inFmt, inSampleRate / 100 * 2));
+    if (!_fromQueue.Init(inChannelNums, inSampleRate, inFmt, inSampleRate / 100 * 2)) {
+        __DebugPrint("_fromQueue.Init failed");
+        return false;
+    }
 
     av_channel_layout_default(&tmpLayout, outChannelNums);
     av_opt_set_chlayout(_swrCtx, "out_chlayout", &tmpLayout, 0);
@@ -124,11 +131,20 @@ bool Resampler::Open(int inChannelNums,
         __DebugPrint("swr_init(_swrCtx) failed\n");
         return false;
     }
-    __CheckBool(_toQueue.Init(outChannelNums, outSampleRate, outFmt, outNbSample));
-    __CheckBool(_swrFrame = Frame<MediaType::AUDIO>::Alloc(outFmt,
+    if (!_toQueue.Init(outChannelNums, outSampleRate, outFmt, outNbSample)) {
+        __DebugPrint("_toQueue.Init failed");
+        Close();
+        return false;
+    }
+    _swrFrame = Frame<MediaType::AUDIO>::Alloc(outFmt,
                                                            &tmpLayout,
                                                            outSampleRate,
-                                                           outSampleRate / 100 * 2));
+                                                           outSampleRate / 100 * 2);
+    if (!_swrFrame) {
+        __DebugPrint("Alloc _swrFrame failed");
+        Close();
+        return false;
+    }
 
     return true;
 }
@@ -139,40 +155,14 @@ void Resampler::Close()
     Free(_swrFrame, [this] { av_frame_free(&_swrFrame); });
 }
 
-bool Resampler::Convert(uint8_t* data, int size)
-{
-    std::vector<Frame<MediaType::AUDIO>> ret;
-    if (data == nullptr) {
-        return false;
-    }
-    _fromQueue.Push(data, size);
-    for (; true;) { // 转换
-        auto frame = _fromQueue.Pop();
-        if (frame.frame == nullptr) {
-            break;
-        }
-        int ret = swr_convert(_swrCtx,
-                              _swrFrame->data,
-                              _swrFrame->nb_samples,
-                              (const uint8_t**) frame.frame->data,
-                              frame.frame->nb_samples);
-        if (ret < 0) {
-            char buffer[AV_ERROR_MAX_STRING_SIZE];
-            av_strerror(ret, buffer, sizeof(buffer));
-            qDebug() << "swr_convert ----------" << ret << QString::fromLatin1(buffer);
-            break;
-        }
-
-        _toQueue.Push(_swrFrame->data[0], _swrFrame->linesize[0]);
-    }
-    return true;
-}
-
 AVFrame* AudioMixer::Convert(uint32_t index, uint8_t* inBuf, uint32_t size)
 {
     std::lock_guard<std::mutex> locker(_mutex);
     auto iter = _audioInputInfos.find(index);
-    __CheckNullptr(iter != _audioInputInfos.end());
+    if (iter == _audioInputInfos.end()) {
+        __DebugPrint("Audio input index %u not found", index);
+        return nullptr;
+    }
 
     // // 添加调试信息
     // static int debugCounter = 0;
@@ -180,7 +170,10 @@ AVFrame* AudioMixer::Convert(uint32_t index, uint8_t* inBuf, uint32_t size)
     //     qDebug() << "AudioMixer::Convert - Input size:" << size << "bytes, Input index: " << index;
     // }
 
-    __CheckNullptr(iter->second.resampler->Convert(inBuf, size));
+    if (!iter->second.resampler->Convert(inBuf, size)) {
+        __DebugPrint("Resampler::Convert failed");
+        return nullptr;
+    }
     return _AdjustVolume() ? _outputFrame : nullptr;
 }
 
@@ -264,9 +257,15 @@ bool AudioMixer::AddAudioInput(uint32_t index,
                                AVSampleFormat format)
 {
     std::lock_guard<std::mutex> locker(_mutex);
-    __CheckBool(!_inited);
+    if (_inited) {
+        __DebugPrint("AddAudioInput called after init");
+        return false;
+    }
     // 根据index保存是否已经存在
-    __CheckBool(_audioInputInfos.find(index) == _audioInputInfos.end());
+    if (_audioInputInfos.find(index) != _audioInputInfos.end()) {
+        __DebugPrint("AddAudioInput duplicate index %u", index);
+        return false;
+    }
 
     auto& filterInfo = _audioInputInfos[index];
     // 设置音频相关参数
@@ -284,7 +283,10 @@ bool AudioMixer::AddAudioOutput(const uint32_t sampleRate,
                                 const AVSampleFormat format)
 {
     std::lock_guard<std::mutex> locker(_mutex);
-    __CheckBool(!_inited);
+    if (_inited) {
+        __DebugPrint("AddAudioOutput called after init");
+        return false;
+    }
     // 设置音频相关参数
     _audioOutputInfo.sampleRate = sampleRate;
     _audioOutputInfo.channels = channels;
@@ -303,21 +305,28 @@ bool AudioMixer::SetOutFrameSize(int outFrameSize)
     for (auto&& filterInfoPair : _audioInputInfos) {
         auto&& filterInfo = filterInfoPair.second;
         filterInfo.resampler = std::make_unique<Resampler>();
-        __CheckBool(filterInfo.resampler->Open(filterInfo.channels,
+        if (!filterInfo.resampler->Open(filterInfo.channels,
                                                filterInfo.sampleRate,
                                                filterInfo.format,
                                                _audioOutputInfo.channels,
                                                _audioOutputInfo.sampleRate,
                                                _audioOutputInfo.format,
-                                               outFrameSize));
+                                               outFrameSize)) {
+            __DebugPrint("Resampler::Open failed for input %s", filterInfo.name.c_str());
+            return false;
+        }
     }
     AVChannelLayout tmpLayout;
     av_channel_layout_default(&tmpLayout, _audioOutputInfo.channels);
     Free(_outputFrame, [this] { av_frame_free(&_outputFrame); });
-    __CheckBool(_outputFrame = Frame<MediaType::AUDIO>::Alloc(_audioOutputInfo.format,
+    _outputFrame = Frame<MediaType::AUDIO>::Alloc(_audioOutputInfo.format,
                                                               &tmpLayout,
                                                               _audioOutputInfo.sampleRate,
-                                                              outFrameSize));
+                                                              outFrameSize);
+    if (!_outputFrame) {
+        __DebugPrint("Alloc _outputFrame failed");
+        return false;
+    }
     _inited = true;
     return true;
 }
@@ -340,3 +349,31 @@ AudioMixer::AudioInfo* AudioMixer::GetInputInfo(uint32_t index)
     auto iter = _audioInputInfos.find(index);
     return iter == _audioInputInfos.end() ? nullptr : &(iter->second);
 }
+
+bool Resampler::Convert(uint8_t* data, int size)
+{
+    if (data == nullptr) {
+        return false;
+    }
+    _fromQueue.Push(data, size);
+    for (;;) { // 转换
+        auto frame = _fromQueue.Pop();
+        if (frame.frame == nullptr) {
+            break;
+        }
+        int ret = swr_convert(_swrCtx,
+                              _swrFrame->data,
+                              _swrFrame->nb_samples,
+                              (const uint8_t**) frame.frame->data,
+                              frame.frame->nb_samples);
+        if (ret < 0) {
+            char buffer[AV_ERROR_MAX_STRING_SIZE];
+            av_strerror(ret, buffer, sizeof(buffer));
+            qDebug() << "swr_convert failed" << ret << QString::fromLatin1(buffer);
+            return false;
+        }
+
+        _toQueue.Push(_swrFrame->data[0], _swrFrame->linesize[0]);
+    }
+    return true;
+}

+ 21 - 18
AvRecorder/encoder/video_encoder.cpp

@@ -43,16 +43,17 @@ bool Encoder<MediaType::VIDEO>::Open(const Param& encodeParam, AVFormatContext*
 bool Encoder<MediaType::VIDEO>::PushFrame(AVFrame* frame, bool isEnd, uint64_t pts)
 {
     if (!isEnd) {
-        __CheckBool(_Trans(frame));
+        if (!_Trans(frame)) { return false; }
         frame = _bufferFrame;
-        __CheckBool(frame);
+        if (!frame) { return false; }
     } else {
         frame = nullptr; // 直接刷新编码器缓存
     }
     if (frame != nullptr) {
         frame->pts = pts;
     }
-    __CheckBool(avcodec_send_frame(_codecCtx, frame) >= 0);
+    int sendRet = avcodec_send_frame(_codecCtx, frame);
+    if (sendRet < 0) { __DebugPrint("avcodec_send_frame failed: %d", sendRet); return false; }
     return true;
 }
 
@@ -65,10 +66,6 @@ void Encoder<MediaType::VIDEO>::AfterEncode()
 
 void Encoder<MediaType::VIDEO>::Close()
 {
-    if (_codecCtx != nullptr) {
-        avcodec_free_context(&_codecCtx);
-    }
-
     Free(_codecCtx, [this] { avcodec_free_context(&_codecCtx); });
     Free(_hwDeviceCtx, [this] { av_buffer_unref(&_hwDeviceCtx); });
     _converter = nullptr;
@@ -95,7 +92,8 @@ void Encoder<MediaType::VIDEO>::_FindUsableEncoders()
     Encoder encoder;
     AVFormatContext* fmtCtx = nullptr;
 
-    __CheckNo(avformat_alloc_output_context2(&fmtCtx, nullptr, nullptr, "test.mp4") >= 0);
+    int ctxRet = avformat_alloc_output_context2(&fmtCtx, nullptr, nullptr, "test.mp4");
+    if (ctxRet < 0) { qDebug() << "VideoEncoder: Failed to alloc output context for testing encoders"; return; }
     
     // 测试所有编码器,不做任何假设
     for (const auto& name : _encoderNames) {
@@ -162,8 +160,10 @@ bool Encoder<MediaType::VIDEO>::_Init(const Param& encodeParam, AVFormatContext*
         __DebugPrint("av_hwdevice_ctx_create failed\n");
         return false;
     }
-    __CheckBool(_codec = avcodec_find_encoder_by_name(encodeParam.name.c_str()));
-    __CheckBool(_codecCtx = avcodec_alloc_context3(_codec));
+    _codec = avcodec_find_encoder_by_name(encodeParam.name.c_str());
+    if (!_codec) { __DebugPrint("avcodec_find_encoder_by_name failed: %s", encodeParam.name.c_str()); return false; }
+    _codecCtx = avcodec_alloc_context3(_codec);
+    if (!_codecCtx) { __DebugPrint("avcodec_alloc_context3 failed"); return false; }
     _codecCtx->bit_rate = encodeParam.bitRate;
     _codecCtx->width = encodeParam.width;
     _codecCtx->height = encodeParam.height;
@@ -205,7 +205,7 @@ bool Encoder<MediaType::VIDEO>::_Init(const Param& encodeParam, AVFormatContext*
         }
     }
 
-    __CheckBool(!_isHardware || _SetHwFrameCtx());
+    if (_isHardware && !_SetHwFrameCtx()) { return false; }
     return true;
 }
 bool Encoder<MediaType::VIDEO>::_SetHwFrameCtx()
@@ -213,7 +213,8 @@ bool Encoder<MediaType::VIDEO>::_SetHwFrameCtx()
     AVBufferRef* hwFramesRef;
     AVHWFramesContext* framesCtx = nullptr;
 
-    __CheckBool(hwFramesRef = av_hwframe_ctx_alloc(_hwDeviceCtx));
+    hwFramesRef = av_hwframe_ctx_alloc(_hwDeviceCtx);
+    if (!hwFramesRef) { __DebugPrint("av_hwframe_ctx_alloc failed"); return false; }
     framesCtx = (AVHWFramesContext*) (hwFramesRef->data);
     framesCtx->format = _pixFmt;
     framesCtx->sw_format = AV_PIX_FMT_NV12;
@@ -225,7 +226,8 @@ bool Encoder<MediaType::VIDEO>::_SetHwFrameCtx()
         av_buffer_unref(&hwFramesRef);
         return false;
     }
-    __CheckBool(_codecCtx->hw_frames_ctx = av_buffer_ref(hwFramesRef));
+    _codecCtx->hw_frames_ctx = av_buffer_ref(hwFramesRef);
+    if (!_codecCtx->hw_frames_ctx) { __DebugPrint("av_buffer_ref failed"); av_buffer_unref(&hwFramesRef); return false; }
     av_buffer_unref(&hwFramesRef);
     return true;
 }
@@ -248,21 +250,22 @@ bool Encoder<MediaType::VIDEO>::_Trans(AVFrame* frame)
     }
     if (_isHardware) {
         _bufferFrame = _ToHardware();
-        __CheckBool(_bufferFrame);
+        if (!_bufferFrame) { return false; }
     }
     return true;
 }
 
 AVFrame* Encoder<MediaType::VIDEO>::_ToHardware()
 {
-    __CheckBool(_hwDeviceCtx);
-    __CheckBool(_codecCtx && _codecCtx->hw_frames_ctx);
+    if (!_hwDeviceCtx) { __DebugPrint("_hwDeviceCtx is null"); return nullptr; }
+    if (!(_codecCtx && _codecCtx->hw_frames_ctx)) { __DebugPrint("hw_frames_ctx is null"); return nullptr; }
     if (_hwFrame == nullptr) {
-        __CheckBool(_hwFrame = av_frame_alloc());
+        _hwFrame = av_frame_alloc();
+        if (!_hwFrame) { __DebugPrint("av_frame_alloc failed"); return nullptr; }
         _hwFrame->format = ((AVHWFramesContext*)_codecCtx->hw_frames_ctx->data)->format;
         _hwFrame->width = _codecCtx->width;
         _hwFrame->height = _codecCtx->height;
-        __CheckBool(av_hwframe_get_buffer(_codecCtx->hw_frames_ctx, _hwFrame, 0) >= 0);
+        if (av_hwframe_get_buffer(_codecCtx->hw_frames_ctx, _hwFrame, 0) < 0) { __DebugPrint("av_hwframe_get_buffer failed"); return nullptr; }
     }
     if (av_hwframe_transfer_data(_hwFrame, _bufferFrame, 0) < 0) {
         __DebugPrint("hw transfer failed\n");

+ 6 - 6
AvRecorder/muxer/av_muxer.cpp

@@ -366,7 +366,7 @@ int AvMuxer::AddAudioStream(const Encoder<MediaType::AUDIO>::Param& param)
 bool AvMuxer::Write(AVFrame* frame, int streamIndex, bool isEnd)
 {
     std::lock_guard<std::mutex> lk(_mtx);
-    __CheckBool(_infos.size() > streamIndex);
+    if (!(streamIndex >= 0 && _infos.size() > (size_t)streamIndex)) { return false; }
     auto&& info = _infos[streamIndex];
     if (info.isEnd) {
         return true;
@@ -375,7 +375,7 @@ bool AvMuxer::Write(AVFrame* frame, int streamIndex, bool isEnd)
         info.isEnd = isEnd;
         frame = nullptr;
     }
-    __CheckBool(info.encoder);
+    if (!info.encoder) { return false; }
 
     // 只在有多个活跃流且音频流有数据时才做同步检查
     int activeStreamCount = 0;
@@ -441,14 +441,14 @@ void AvMuxer::Close()
         __DebugPrint("stream: %d, time:%f", index, double(_infos[index].pts) / _infos[index].fps);
     }
     if (_isOpenFile) {
-        __CheckNo(av_write_trailer(_fmtCtx) >= 0);
+        int tr = av_write_trailer(_fmtCtx);
+        if (tr < 0) { __DebugPrint("av_write_trailer failed: %d", tr); }
         Free(_fmtCtx->pb, [this] { avio_closep(&_fmtCtx->pb); });
     }
     _isOpenFile = false;
 
     for (auto&& info : _infos) {
-        info.encoder->Close();
-        Free(info.encoder, [&info] {info.encoder->Close(); delete info.encoder; });
+        Free(info.encoder, [&info] { info.encoder->Close(); delete info.encoder; });
     }
     _infos.clear();
     Free(_fmtCtx, [this] { avformat_free_context(_fmtCtx); });
@@ -483,7 +483,7 @@ bool AvMuxer::_AddStream(Info& info)
 
 AVCodecContext* AvMuxer::GetCodecCtx(int streamIndex)
 {
-    __CheckNullptr(streamIndex >= 0 && _infos.size() > streamIndex);
+    if (!(streamIndex >= 0 && _infos.size() > (size_t)streamIndex)) { __DebugPrint("GetCodecCtx invalid streamIndex: %d", streamIndex); return nullptr; }
     return _infos[streamIndex].encoder->GetCtx();
 }
 

+ 17 - 5
AvRecorder/recorder/audio_recorder.cpp

@@ -63,16 +63,25 @@ bool AudioRecorder::Open(const std::vector<AudioCapturer::Type>& deviceTypes,
                      << "Got:" << format.channels;
         }
         
-        __CheckBool(_mixer.AddAudioInput(index,
+        if (!_mixer.AddAudioInput(index,
                                           format.sampleRate,
                                           format.channels,
                                           format.bitsPerSample,
-                                          _GetAVSampleFormat(format.bitsPerSample)));
+                                          _GetAVSampleFormat(format.bitsPerSample))) {
+            qDebug() << "AddAudioInput failed for index" << index;
+            return false;
+        }
     }
 
-    __CheckBool(_mixer.AddAudioOutput(sampleRate, channels, bitsPerSample, format));
+    if (!_mixer.AddAudioOutput(sampleRate, channels, bitsPerSample, format)) {
+        qDebug() << "AddAudioOutput failed";
+        return false;
+    }
     _param = param;
-    __CheckBool(_mixer.SetOutFrameSize(1024));
+    if (!_mixer.SetOutFrameSize(1024)) {
+        qDebug() << "SetOutFrameSize failed";
+        return false;
+    }
 
     // 启动所有成功初始化的音频捕获器
     for (auto capturer : m_audioCapturers) {
@@ -118,7 +127,10 @@ bool AudioRecorder::LoadMuxer(AvMuxer& muxer)
     }
     
     int streamIndex = muxer.AddAudioStream(_param);
-    __CheckBool(streamIndex != -1);
+    if (streamIndex == -1) {
+        qDebug() << "AddAudioStream failed";
+        return false;
+    }
     
     _muxers.emplace_back(&muxer, streamIndex);
     

+ 10 - 8
AvRecorder/recorder/video_recorder.cpp

@@ -39,22 +39,24 @@ bool VideoRecorder::Open(int monitorIdx,
 
 bool VideoRecorder::_Open(Encoder<MediaType::VIDEO>::Param& param)
 {
-    __CheckBool(_encodeFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12,
+    _encodeFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12,
                                                               _capturer.getFrame()
                                                                   ? _capturer.getFrame()->width
                                                                   : param.width,
                                                               _capturer.getFrame()
                                                                   ? _capturer.getFrame()->height
-                                                                  : param.height));
+                                                                  : param.height);
+    if (!_encodeFrame) { return false; }
     {
         std::lock_guard<std::mutex> renderLk(_renderMtx);
-        __CheckBool(_renderFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12,
+        _renderFrame = Frame<MediaType::VIDEO>::Alloc(AV_PIX_FMT_NV12,
                                                                   _capturer.getFrame()
                                                                       ? _capturer.getFrame()->width
                                                                       : param.width,
                                                                   _capturer.getFrame()
                                                                       ? _capturer.getFrame()->height
-                                                                      : param.height));
+                                                                      : param.height);
+        if (!_renderFrame) { return false; }
     }
     // 捕获定时器和帧获取逻辑
     _captureTimer.Start(param.fps, [this] {
@@ -64,10 +66,10 @@ bool VideoRecorder::_Open(Encoder<MediaType::VIDEO>::Param& param)
             if (srcFrame->format != _encodeFrame->format) {
                 std::lock_guard<std::mutex> renderLk(_renderMtx);
                 Free(_encodeFrame, [this] { av_frame_free(&_encodeFrame); });
-                __CheckNo(
-                    _encodeFrame = Frame<MediaType::VIDEO>::Alloc(AVPixelFormat(srcFrame->format),
+                _encodeFrame = Frame<MediaType::VIDEO>::Alloc(AVPixelFormat(srcFrame->format),
                                                                   srcFrame->width,
-                                                                  srcFrame->height));
+                                                                  srcFrame->height);
+                if (!_encodeFrame) { return; }
             }
             av_frame_copy(_encodeFrame, srcFrame);
         }
@@ -106,7 +108,7 @@ bool VideoRecorder::LoadMuxer(AvMuxer& muxer)
     }
     
     int streamIndex = muxer.AddVideoStream(_param);
-    __CheckBool(streamIndex != -1);
+    if (streamIndex == -1) { return false; }
     
     _muxers.emplace_back(&muxer, streamIndex);
     return true;

+ 26 - 14
AvRecorder/ui/av_recorder.cpp

@@ -151,7 +151,10 @@ void AvRecorder::initConnect()
             fileName += QDateTime::currentDateTime().toString("yyyy-MM-dd-hh-mm-ss").toStdString()
                         + "." + format;
             // fileName += std::string("test.") + format;
-            __CheckNo(startStream(fileName, format));
+            if (!startStream(fileName, format)) {
+            qDebug() << "startStream failed for recording";
+            return;
+            }
             m_liveBtn->setEnabled(false);
             m_recordBtn->setText("停止录制");
         } else {
@@ -166,13 +169,19 @@ void AvRecorder::initConnect()
             auto fileName = m_settingsParam.liveUrl + "/" + m_settingsParam.liveName;
             bool isRtsp = m_settingsParam.liveUrl.find("rtsp") != std::string::npos;
             qDebug() << "直播地址:" << QString::fromStdString(fileName);
-            __CheckNo(startStream(fileName, isRtsp ? "rtsp" : "flv"));
-
-            // 如果勾选了同步录像,则开始录像
-            if (m_syncRecordBox->isChecked()) {
-                __CheckNo(startSyncRecord());
+            if (!startStream(fileName, isRtsp ? "rtsp" : "flv")) {
+            qDebug() << "startStream failed for live";
+            return;
             }
 
+             // 如果勾选了同步录像,则开始录像
+             if (m_syncRecordBox->isChecked()) {
+             if (!startSyncRecord()) {
+             qDebug() << "startSyncRecord failed";
+             // 不阻断直播,仅提示
+             }
+             }
+
             m_recordBtn->setEnabled(false);
             m_liveBtn->setText("停止直播");
         } else {
@@ -335,15 +344,18 @@ void AvRecorder::startCapture(CaptureMethod method)
 
 void AvRecorder::dealCapture()
 {
-    __CheckNo(m_audioRecorder.Open({AudioCapturer::Microphone, AudioCapturer::Speaker},
-                                   m_settingsParam.audioParam));
-    m_microphoneWidget->setEnabled(m_audioRecorder.GetCaptureInfo(MICROPHONE_INDEX) != nullptr);
-    m_speakerWidget->setEnabled(m_audioRecorder.GetCaptureInfo(SPEAKER_INDEX) != nullptr);
-    m_fpsLabel->setText(QString("FPS: %1").arg(m_settingsParam.videoParam.fps));
-    m_videoEncodeLabel->setText(("编码器: " + m_settingsParam.videoParam.name).c_str());
-    if (m_audioEncodeLabel) {
-        m_audioEncodeLabel->setText(("音频编码器: " + m_settingsParam.audioParam.name).c_str());
+    if (!m_audioRecorder.Open({AudioCapturer::Microphone, AudioCapturer::Speaker},
+                              m_settingsParam.audioParam)) {
+        qDebug() << "AudioRecorder::Open failed";
+        return;
     }
+     m_microphoneWidget->setEnabled(m_audioRecorder.GetCaptureInfo(MICROPHONE_INDEX) != nullptr);
+     m_speakerWidget->setEnabled(m_audioRecorder.GetCaptureInfo(SPEAKER_INDEX) != nullptr);
+     m_fpsLabel->setText(QString("FPS: %1").arg(m_settingsParam.videoParam.fps));
+     m_videoEncodeLabel->setText(("编码器: " + m_settingsParam.videoParam.name).c_str());
+     if (m_audioEncodeLabel) {
+         m_audioEncodeLabel->setText(("音频编码器: " + m_settingsParam.audioParam.name).c_str());
+     }
 }
 
 void AvRecorder::stopCapture()