ソースを参照

修复正常运行

zhuizhu 6 ヶ月 前
コミット
72b1e48054

+ 100 - 0
MainPanel.cpp

@@ -862,3 +862,103 @@ bool MainPanel::eventFilter(QObject *watched, QEvent *event)
     }
     return QWidget::eventFilter(watched, event);
 }
+
+void MainPanel::startStreamingTest(const QString &roomId, bool enableRecording)
+{
+    qDebug() << "开始推流拉流测试,房间ID:" << roomId << ",启用录制:" << enableRecording;
+    
+    // 1. 创建或获取RecorderWidget实例用于推流
+    if (!m_recorderStandalone) {
+        m_recorderStandalone = new RecorderWidget();
+        // 连接推流相关信号
+        connect(m_recorderStandalone, &RecorderWidget::streamingStarted, this, [this]() {
+            qDebug() << "推流已开始";
+        });
+        connect(m_recorderStandalone, &RecorderWidget::streamingStopped, this, [this]() {
+            qDebug() << "推流已停止";
+        });
+        connect(m_recorderStandalone, &RecorderWidget::errorOccurred, this, [this](const QString &error) {
+            qDebug() << "推流错误:" << error;
+        });
+    }
+    
+    // 2. 创建或获取AVPlayerWidget实例用于拉流
+    if (!m_avPlayerStandalone) {
+        m_avPlayerStandalone = new AVPlayerWidget();
+        // 连接拉流相关信号
+        connect(m_avPlayerStandalone, &AVPlayerWidget::playStateChanged, this, [this](bool isPlaying) {
+            qDebug() << "播放状态变化:" << (isPlaying ? "播放中" : "已停止");
+        });
+        connect(m_avPlayerStandalone, &AVPlayerWidget::playError, this, [this](const QString &error) {
+            qDebug() << "播放错误:" << error;
+        });
+        connect(m_avPlayerStandalone, &AVPlayerWidget::playLoadingStarted, this, [this]() {
+            qDebug() << "开始加载流媒体";
+        });
+        connect(m_avPlayerStandalone, &AVPlayerWidget::playLoadingFinished, this, [this]() {
+            qDebug() << "流媒体加载完成";
+        });
+    }
+    
+    // 3. 配置推流参数
+    m_recorderStandalone->setLiveName(roomId);
+    
+    // 4. 配置拉流参数
+    m_avPlayerStandalone->setPlayRoomId(roomId);
+    
+    // 5. 显示推流和拉流窗口
+    m_recorderStandalone->show();
+    m_recorderStandalone->resize(800, 600);
+    m_recorderStandalone->move(0, 0);
+    m_recorderStandalone->setWindowTitle(QString("推流测试 - 房间ID: %1").arg(roomId));
+    
+    m_avPlayerStandalone->show();
+    m_avPlayerStandalone->resize(800, 600);
+    m_avPlayerStandalone->move(800, 0);
+    m_avPlayerStandalone->setWindowTitle(QString("拉流测试 - 房间ID: %1").arg(roomId));
+    
+    // 6. 启动推流
+    if (m_recorderStandalone->startStreaming()) {
+        qDebug() << "推流启动成功";
+    } else {
+        qDebug() << "推流启动失败";
+    }
+    
+    // 7. 延迟启动拉流,等待推流稳定
+    QTimer::singleShot(5000, this, [this]() {
+        if (m_avPlayerStandalone) {
+            m_avPlayerStandalone->startPlayAsync();
+            qDebug() << "延迟启动拉流";
+        }
+    });
+
+    // 8. 如果启用录制,同时开始录制
+    if (enableRecording) {
+        QTimer::singleShot(1000, this, [this]() {
+            if (m_recorderStandalone) {
+                m_recorderStandalone->startRecording();
+                qDebug() << "同时启动录制功能";
+            }
+        });
+    }
+}
+
+void MainPanel::stopStreamingTest()
+{
+    qDebug() << "停止推流拉流测试";
+    
+    // 1. 停止推流
+    if (m_recorderStandalone) {
+        m_recorderStandalone->stopStreaming();
+        m_recorderStandalone->stopRecording(); // 同时停止录制
+        m_recorderStandalone->hide();
+    }
+    
+    // 2. 停止拉流
+    if (m_avPlayerStandalone) {
+        m_avPlayerStandalone->stop();
+        m_avPlayerStandalone->hide();
+    }
+    
+    qDebug() << "推流拉流测试已停止";
+}

+ 14 - 1
MainPanel.h

@@ -111,10 +111,23 @@ public slots:
     void showChatStandalone();
     
     /**
-     * @brief 将聊天窗口嵌入到主面板中
+     * @brief 显示嵌入式聊天窗口
      */
     void showChatEmbedded();
 
+    // ========== 测试功能 ==========
+    /**
+     * @brief 开始推流拉流测试
+     * @param roomId 房间ID,用于推流和拉流的标识
+     * @param enableRecording 是否同时启用录制功能
+     */
+    void startStreamingTest(const QString &roomId = "0198da3f-5900-78e3-8160-2b7a149cc772", bool enableRecording = false);
+    
+    /**
+     * @brief 停止推流拉流测试
+     */
+    void stopStreamingTest();
+
 signals:
     // ========== 信号 ==========
     /**

+ 55 - 24
libs/AVPlayer/av_player.cpp

@@ -282,17 +282,20 @@ int AVPlayer::initVideo()
     m_videoIndex = m_decoder->videoIndex();
     m_fmtCtx = m_decoder->formatContext();
 
-    m_imageWidth = m_videoCodecPar->width;
-    m_imageHeight = m_videoCodecPar->height;
+    m_imageWidth = m_videoCodecPar ? m_videoCodecPar->width : 0;
+    m_imageHeight = m_videoCodecPar ? m_videoCodecPar->height : 0;
 
     m_dstPixFmt = AV_PIX_FMT_YUV422P;
     // 改用快速缩放算法以降低转换时延
     m_swsFlags = SWS_FAST_BILINEAR;
 
-    //分配存储转换后帧数据的buffer内存
-    int bufSize = av_image_get_buffer_size(m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
-    m_buffer = (uint8_t*) av_realloc(m_buffer, bufSize * sizeof(uint8_t));
-    av_image_fill_arrays(m_pixels, m_pitch, m_buffer, m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
+    // 仅当分辨率有效时分配缓存,否则延迟到首帧分配
+    if (m_imageWidth > 0 && m_imageHeight > 0) {
+        int bufSize = av_image_get_buffer_size(m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
+        m_buffer = (uint8_t*) av_realloc(m_buffer, bufSize * sizeof(uint8_t));
+        av_image_fill_arrays(m_pixels, m_pitch, m_buffer, m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
+    }
+
     //视频帧播放回调递插入线程池任务队列
     if (!ThreadPool::addTask(std::bind(&AVPlayer::videoCallback, this, std::placeholders::_1),
                              std::make_shared<int>(0))) {
@@ -404,10 +407,23 @@ void AVPlayer::initAVClock()
 void AVPlayer::displayImage(AVFrame* frame)
 {
     if (frame) {
-        //判断若是否需要格式转换
-        if ((m_videoCodecPar->width != m_imageWidth || m_videoCodecPar->height != m_imageHeight
-             || m_videoCodecPar->format != m_dstPixFmt)
-            && !m_swsCtx) {
+        // 首帧兜底:探测不到分辨率时,用首帧尺寸初始化并分配缓存
+        if (m_imageWidth <= 0 || m_imageHeight <= 0) {
+            m_imageWidth = frame->width;
+            m_imageHeight = frame->height;
+
+            int bufSize = av_image_get_buffer_size(m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
+            m_buffer = (uint8_t*) av_realloc(m_buffer, bufSize * sizeof(uint8_t));
+            av_image_fill_arrays(m_pixels, m_pitch, m_buffer, m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
+        }
+
+        // 判断是否需要像素格式/分辨率转换
+        bool needConvert =
+            (frame->format != m_dstPixFmt) ||
+            (frame->width != m_imageWidth) ||
+            (frame->height != m_imageHeight);
+
+        if (needConvert) {
             m_swsCtx = sws_getCachedContext(m_swsCtx,
                                             frame->width,
                                             frame->height,
@@ -419,24 +435,39 @@ void AVPlayer::displayImage(AVFrame* frame)
                                             nullptr,
                                             nullptr,
                                             nullptr);
-        }
-        if (m_swsCtx) {
-            sws_scale(m_swsCtx, frame->data, frame->linesize, 0, frame->height, m_pixels, m_pitch);
-            uint8_t* planes[4] = { m_pixels[0], m_pixels[1], m_pixels[2], m_pixels[3] };
-            int lines[4] = { m_pitch[0], m_pitch[1], m_pitch[2], m_pitch[3] };
-            emit frameChanged(QSharedPointer<VideoFrame>::create(m_dstPixFmt,
-                                                                 m_imageWidth,
-                                                                 m_imageHeight,
-                                                                 planes,
-                                                                 lines));
+
+            if (m_swsCtx) {
+                // 确保输出缓存已按当前目标尺寸分配
+                int bufSize = av_image_get_buffer_size(m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
+                m_buffer = (uint8_t*) av_realloc(m_buffer, bufSize * sizeof(uint8_t));
+                av_image_fill_arrays(m_pixels, m_pitch, m_buffer, m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
+
+                sws_scale(m_swsCtx, frame->data, frame->linesize, 0, frame->height, m_pixels, m_pitch);
+
+                uint8_t* planes[4] = { m_pixels[0], m_pixels[1], m_pixels[2], m_pixels[3] };
+                int lines[4] = { m_pitch[0], m_pitch[1], m_pitch[2], m_pitch[3] };
+                emit frameChanged(QSharedPointer<VideoFrame>::create(m_dstPixFmt,
+                                                                     m_imageWidth,
+                                                                     m_imageHeight,
+                                                                     planes,
+                                                                     lines));
+            } else {
+                // 回退:直接透传
+                emit frameChanged(QSharedPointer<VideoFrame>::create((AVPixelFormat) frame->format,
+                                                                     frame->width,
+                                                                     frame->height,
+                                                                     frame->data,
+                                                                     frame->linesize));
+            }
         } else {
-            // 直接使用解码帧的平面数据
-            emit frameChanged(QSharedPointer<VideoFrame>::create((AVPixelFormat)m_videoCodecPar->format,
-                                                                 m_imageWidth,
-                                                                 m_imageHeight,
+            // 无需转换,直接透传
+            emit frameChanged(QSharedPointer<VideoFrame>::create((AVPixelFormat) frame->format,
+                                                                 frame->width,
+                                                                 frame->height,
                                                                  frame->data,
                                                                  frame->linesize));
         }
+
         //记录视频时钟
         m_videoClock.setClock(frame->pts * av_q2d(m_fmtCtx->streams[m_videoIndex]->time_base));
     }

+ 15 - 12
libs/Recorder/encoder_video_nvenc.cpp

@@ -192,19 +192,22 @@ void encoder_video_nvenc::encode_loop()
             _cond_var.wait_for(lock, std::chrono::milliseconds(300));
 
         while (_ring_buffer->get(_buff, _buff_size, yuv_frame)) {
-            _frame->pkt_dts = yuv_frame.pkt_dts;
-            _frame->pkt_dts = yuv_frame.pkt_dts;
-            _frame->pts = yuv_frame.pts;
-
-            if ((error = encode(_frame, packet)) != AE_NO) {
-                if (_on_error)
-                    _on_error(error);
-
-                al_fatal("encode 264 packet failed:%d", error);
-
-                break;
+            if (yuv_frame.pts != AV_NOPTS_VALUE) {
+                _frame->pts = av_rescale_q(yuv_frame.pts, AVRational{1, AV_TIME_BASE}, _encoder_ctx->time_base);
+            } else {
+                _frame->pts = AV_NOPTS_VALUE;
             }
-        }
+            _frame->pkt_dts = _frame->pts;
+
+            if ((error = encode(_frame, packet)) != AE_NO) {
+                if (_on_error)
+                    _on_error(error);
+
+                al_fatal("encode 264 packet failed:%d", error);
+
+                break;
+            }
+        }
 
         _cond_notify = false;
     }

+ 9 - 6
libs/Recorder/encoder_video_x264.cpp

@@ -206,9 +206,15 @@ void encoder_video_x264::encode_loop()
             _cond_var.wait_for(lock, std::chrono::milliseconds(300));
 
         while (_ring_buffer->get(_buff, _buff_size, yuv_frame)) {
-            _frame->pkt_dts = yuv_frame.pkt_dts;
-            _frame->pkt_dts = yuv_frame.pkt_dts;
-            _frame->pts = yuv_frame.pts;
+            // Normalize incoming frame timestamps to encoder time_base (1/fps)
+            // Source pts is in AV_TIME_BASE (microseconds) from capturer
+            if (yuv_frame.pts != AV_NOPTS_VALUE) {
+                _frame->pts = av_rescale_q(yuv_frame.pts, AVRational{1, AV_TIME_BASE}, _encoder_ctx->time_base);
+            } else {
+                _frame->pts = AV_NOPTS_VALUE;
+            }
+            // Keep pkt_dts consistent with pts for no-B-frames encoders
+            _frame->pkt_dts = _frame->pts;
 
             if ((error = encode(_frame, packet)) != AE_NO) {
                 if (_on_error)
@@ -223,9 +229,6 @@ void encoder_video_x264::encode_loop()
         _cond_notify = false;
     }
 
-    //flush frame in encoder
-    encode(NULL, packet);
-
     av_packet_free(&packet);
 }
 } // namespace am

+ 27 - 34
libs/Recorder/muxer_ffmpeg.cpp

@@ -327,20 +327,25 @@ void muxer_ffmpeg::on_filter_amix_data(AVFrame *frame, int)
                 resamples->sample_in = 0;
             }
         }
-    } else { //resample size is channels*frame->linesize[0],for 2 channels
+    } else { // planar: copy each channel plane into contiguous planes in buffer
+        const int channels = ffmpeg_get_frame_channels(frame);
         while (remain_len > 0) {
             copied_len = min(resamples->size - resamples->sample_in, remain_len);
             if (copied_len) {
-                memcpy(resamples->buff + resamples->sample_in / 2,
-                       frame->data[0] + (sample_len - remain_len) / 2,
-                       copied_len / 2);
-                memcpy(resamples->buff + resamples->size / 2 + resamples->sample_in / 2,
-                       frame->data[1] + (sample_len - remain_len) / 2,
-                       copied_len / 2);
+                const int copied_per_plane = copied_len / channels;
+                const int written_per_plane = resamples->sample_in / channels;
+                const int plane_size = resamples->size / channels;
+                const int src_offset = (sample_len - remain_len) / channels;
+                for (int ch = 0; ch < channels; ++ch) {
+                    memcpy(resamples->buff + ch * plane_size + written_per_plane,
+                           frame->data[ch] + src_offset,
+                           copied_per_plane);
+                }
                 resamples->sample_in += copied_len;
                 remain_len = remain_len - copied_len;
             }
 
+            //got enough pcm to encoder,resample and mix
             if (resamples->sample_in == resamples->size) {
                 _a_stream->a_enc->put(resamples->buff, resamples->size, frame);
 
@@ -360,7 +365,7 @@ void muxer_ffmpeg::on_filter_aresample_data(AVFrame *frame, int index)
     if (_running == false || !_a_stream->a_enc)
         return;
 
-    AUDIO_SAMPLE *resamples = _a_stream->a_resamples[0];
+    AUDIO_SAMPLE *resamples = _a_stream->a_resamples[index];
 
     int copied_len = 0;
     int sample_len = ffmpeg_get_buffer_size((AVSampleFormat) frame->format,
@@ -394,17 +399,21 @@ void muxer_ffmpeg::on_filter_aresample_data(AVFrame *frame, int index)
                 resamples->sample_in = 0;
             }
         }
-    } else { //resample size is channels*frame->linesize[0],for 2 channels
+    } else { // planar: copy each channel plane into contiguous planes in buffer
+        const int channels = ffmpeg_get_frame_channels(frame);
         while (remain_len > 0) {
             copied_len = min(resamples->size - resamples->sample_in, remain_len);
 
             if (copied_len) {
-                memcpy(resamples->buff + resamples->sample_in / 2,
-                       frame->data[0] + (sample_len - remain_len) / 2,
-                       copied_len / 2);
-                memcpy(resamples->buff + resamples->size / 2 + resamples->sample_in / 2,
-                       frame->data[1] + (sample_len - remain_len) / 2,
-                       copied_len / 2);
+                const int copied_per_plane = copied_len / channels;
+                const int written_per_plane = resamples->sample_in / channels;
+                const int plane_size = resamples->size / channels;
+                const int src_offset = (sample_len - remain_len) / channels;
+                for (int ch = 0; ch < channels; ++ch) {
+                    memcpy(resamples->buff + ch * plane_size + written_per_plane,
+                           frame->data[ch] + src_offset,
+                           copied_per_plane);
+                }
                 resamples->sample_in += copied_len;
                 remain_len = remain_len - copied_len;
             }
@@ -937,18 +946,8 @@ int muxer_ffmpeg::write_video(AVPacket *packet)
 
     packet->stream_index = _v_stream->st->index;
 
-    /*packet->pts = av_rescale_q_rnd(packet->pts, 
-			_v_stream->v_src->get_time_base(), 
-			{ 1,AV_TIME_BASE }, 
-			(AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
-
-		// make audio and video use one clock
-		if (_v_stream->pre_pts == (uint64_t)-1) {
-			_v_stream->pre_pts = packet->pts;
-		}*/
-
     // scale ts with timebase of base_time
-    av_packet_rescale_ts(packet, _v_stream->v_src->get_time_base(), {1, AV_TIME_BASE});
+    av_packet_rescale_ts(packet, _v_stream->v_enc->get_time_base(), {1, AV_TIME_BASE});
 
     // make audio and video use one clock
     packet->pts = packet->pts - _base_time;
@@ -975,14 +974,8 @@ int muxer_ffmpeg::write_audio(AVPacket *packet)
     std::lock_guard<std::mutex> lock(_mutex);
 
     packet->stream_index = _a_stream->st->index;
-
-    AVRational src_timebase = {1, 1};
-
-    if (_a_stream->a_filter_amix != nullptr) {
-        src_timebase = _a_stream->a_filter_amix->get_time_base();
-    } else {
-        src_timebase = _a_stream->a_filter_aresample[0]->get_time_base();
-    }
+    // 音频包的源时间基应为编码器的 time_base
+    AVRational src_timebase = _a_stream->a_enc->get_time_base();
 
     /*packet->pts = av_rescale_q_rnd(packet->pts, 
 			src_timebase,