#include "av_player.h"
// NOTE(review): three #include directives lost their <...> targets during
// extraction. Restored with the headers this file demonstrably uses
// (qDebug, QThread::msleep, std::this_thread / std::chrono) — confirm against VCS.
#include <QDebug>
#include <QThread>
#include <thread>
#include <chrono>
#include "threadpool.h"
#include "vframe.h"
#include "low_latency_config.h"

// Sync threshold definitions were moved to the config file (low_latency_config.h):
// - Upper bound on a single video frame's duration, used to adapt sync at low
//   frame rates: when fps is very low and video runs ahead, doubling the delay
//   is unsuitable and needs special handling; the cap corresponds to 10 fps.
// - "Give up syncing" upper threshold: beyond it synchronization is pointless.

// Constructor: allocates the decoder and the reusable audio frame, zeroes all
// contexts/buffers, and anchors the high-resolution wall-clock base.
AVPlayer::AVPlayer()
    : m_decoder(new Decoder)
    , m_fmtCtx(nullptr)
    , m_audioFrame(av_frame_alloc())
    , m_imageWidth(300)
    , m_imageHeight(300)
    , m_swrCtx(nullptr)
    , m_swsCtx(nullptr)
    , m_buffer(nullptr)
    , m_audioBuf(nullptr)
    , m_duration(0)
    , m_volume(30)
    , m_exit(0)
    , m_pause(0)
    , m_playSpeed(1.0)
    , m_baseTimeUs(0)
    , m_performanceFrameCount(0)
    , m_lastDelayValue(0.0)
{
    m_sonicStream = nullptr;
    // Initialize the high-resolution time base (microseconds).
    m_baseTimeUs = av_gettime_relative();
}

// Destructor: stop playback first, then release every owned resource.
AVPlayer::~AVPlayer()
{
    // FIX(review): stop the SDL audio callback (clearPlayer -> SDL_CloseAudio)
    // BEFORE freeing m_audioFrame — the original order freed the frame while
    // the callback could still be reading it (use-after-free window).
    clearPlayer();
    av_frame_free(&m_audioFrame);
    delete m_decoder;
    // clearPlayer() already nulls these on the normal path; the guards below
    // cover the never-played case.
    if (m_swrCtx)
        swr_free(&m_swrCtx);
    if (m_swsCtx)
        sws_freeContext(m_swsCtx);
    if (m_audioBuf)
        av_free(m_audioBuf);
    if (m_buffer)
        av_free(m_buffer);
}

// Start playback of `url`. Returns 1 when at least one stream (audio or
// video) was initialized successfully, 0 otherwise.
int AVPlayer::play(const QString& url)
{
    clearPlayer();
    if (!m_decoder->decode(url)) {
        qDebug() << "decode failed";
        return 0;
    }
    // Decoding succeeded, so the total stream duration is now available.
    m_duration = m_decoder->avDuration();
    emit AVDurationChanged(m_duration);
    m_pause = 0;
    m_clockInitFlag = -1;
    // Before playback starts, reset the high-resolution time base and the
    // PTS-normalization baselines (first PTS maps to 0).
    m_baseTimeUs = av_gettime_relative();
    m_frameTimerUs = 0;
    m_audioStartPtsUs = -1;
    m_videoStartPtsUs = -1;
    // Determine which streams are present.
    m_audioIndex = m_decoder->audioIndex();
    m_videoIndex = m_decoder->videoIndex();
    m_hasAudio = (m_audioIndex >= 0);
    m_hasVideo = (m_videoIndex >= 0);
    bool ok = false;
    if (m_hasAudio) {
        if (initSDL()) {
            ok = true;
        } else {
            qDebug() << "init sdl failed!";
        }
    }
    if (m_hasVideo) {
        if (initVideo()) {
            ok = true;
        } else {
            qDebug() << "init video failed!";
        }
    }
    // Audio-only playback: initialize the clocks proactively (no video thread
    // will do it).
    if (!m_hasVideo && m_hasAudio) {
        initAVClock();
    }
    return ok ? 1 : 0;
}

// SDL audio callback: pulls decoded frames from the decoder, resamples them to
// the SDL target format (and applies sonic time-stretching when playSpeed != 1),
// mixes `len` bytes into `stream`, then updates the audio clock / progress.
// NOTE(review): `audioPts` is function-local static, so it is shared across all
// AVPlayer instances — fine for a single player, hazardous for several.
void fillAStreamCallback(void* userdata, uint8_t* stream, int len)
{
    memset(stream, 0, len);
    AVPlayer* is = (AVPlayer*) userdata;
    static double audioPts = 0.00;
    while (len > 0) {
        if (is->m_exit)
            return;
        if (is->m_audioBufIndex >= is->m_audioBufSize) {
            // Read index reached the end of the buffer: refill it.
            int ret = is->m_decoder->getAFrame(is->m_audioFrame);
            if (ret) {
                is->m_audioBufIndex = 0;
                // Lazily create the resampler the first time the frame format
                // differs from the SDL target format.
                if ((is->m_targetSampleFmt != is->m_audioFrame->format
                        || is->m_targetChannelLayout != (int64_t) ffmpeg_get_frame_channel_layout(is->m_audioFrame)
                        || is->m_targetFreq != is->m_audioFrame->sample_rate
                        || is->m_targetNbSamples != is->m_audioFrame->nb_samples)
                    && !is->m_swrCtx) {
                    is->m_swrCtx = ffmpeg_swr_alloc_set_opts(nullptr,
                        is->m_targetChannelLayout,
                        is->m_targetSampleFmt,
                        is->m_targetFreq,
                        (int64_t) ffmpeg_get_frame_channel_layout(is->m_audioFrame),
                        (enum AVSampleFormat) is->m_audioFrame->format,
                        is->m_audioFrame->sample_rate,
                        0, nullptr);
                    if (!is->m_swrCtx || swr_init(is->m_swrCtx) < 0) {
                        qDebug() << "swr_init failed";
                        return;
                    }
                }
                if (is->m_swrCtx) {
                    const uint8_t** in = (const uint8_t**) is->m_audioFrame->extended_data;
                    // +256 samples of headroom, mirroring ffplay's resample sizing.
                    int out_count = (uint64_t) is->m_audioFrame->nb_samples * is->m_targetFreq / is->m_audioFrame->sample_rate + 256;
                    int out_size = ffmpeg_get_buffer_size(is->m_targetSampleFmt, is->m_targetChannels, out_count, 0);
                    if (out_size < 0) {
                        qDebug() << "av_samples_get_buffer_size failed";
                        return;
                    }
                    av_fast_malloc(&is->m_audioBuf, &is->m_audioBufSize, out_size);
                    if (!is->m_audioBuf) {
                        qDebug() << "av_fast_malloc failed";
                        return;
                    }
                    int len2 = swr_convert(is->m_swrCtx, &is->m_audioBuf, out_count, in, is->m_audioFrame->nb_samples);
                    if (len2 < 0) {
                        qDebug() << "swr_convert failed";
                        return;
                    }
                    if (is->m_playSpeed != 1.0) {
                        // Time-stretch through sonic without changing pitch.
                        sonicSetSpeed(is->m_sonicStream, is->m_playSpeed);
                        sonicWriteShortToStream(is->m_sonicStream, (short*) is->m_audioBuf, len2);
                        int availSamples = sonicSamplesAvailable(is->m_sonicStream);
                        if (!availSamples) {
                            // Nothing buffered yet: mark the buffer consumed and
                            // fetch the next frame.
                            is->m_audioBufSize = is->m_audioBufIndex;
                            continue;
                        }
                        int numSamples = availSamples;
                        int bytes = numSamples * is->m_targetChannels * av_get_bytes_per_sample(is->m_targetSampleFmt);
                        if (bytes > out_size) {
                            av_fast_malloc(&is->m_audioBuf, &is->m_audioBufSize, bytes);
                        }
                        len2 = sonicReadShortFromStream(is->m_sonicStream, (short*) is->m_audioBuf, numSamples);
                    }
                    is->m_audioBufSize = len2 * is->m_targetChannels * av_get_bytes_per_sample(is->m_targetSampleFmt);
                } else {
                    // Formats already match: copy the frame data straight through.
                    is->m_audioBufSize = ffmpeg_get_buffer_size(is->m_targetSampleFmt, is->m_targetChannels, is->m_audioFrame->nb_samples, 0);
                    av_fast_malloc(&is->m_audioBuf, &is->m_audioBufSize, is->m_audioBufSize + 256);
                    if (!is->m_audioBuf) {
                        qDebug() << "av_fast_malloc failed";
                        return;
                    }
                    memcpy(is->m_audioBuf, is->m_audioFrame->data[0], is->m_audioBufSize);
                }
                audioPts = is->m_audioFrame->pts * av_q2d(is->m_fmtCtx->streams[is->m_audioIndex]->time_base);
                // qDebug() << "audio pts:" << audioPts;
                av_frame_unref(is->m_audioFrame);
            } else {
                // No frame available: check whether we truly reached end of file.
                if (is->m_decoder->isExit()) {
                    emit is->AVTerminate();
                }
                return;
            }
        }
        int len1 = is->m_audioBufSize - is->m_audioBufIndex;
        len1 = (len1 > len ? len : len1);
        SDL_MixAudio(stream, is->m_audioBuf + is->m_audioBufIndex, len1, is->m_volume);
        len -= len1;
        is->m_audioBufIndex += len1;
        stream += len1;
    }
    // Record the audio clock: convert to microseconds and normalize so the
    // first frame maps to 0.
    int64_t audioPtsUs = static_cast<int64_t>(audioPts * 1000000.0);
    if (is->m_audioStartPtsUs < 0) {
        is->m_audioStartPtsUs = audioPtsUs;
    }
    int64_t normAudioPtsUs = audioPtsUs - is->m_audioStartPtsUs;
    is->m_audioClock.setClock(normAudioPtsUs);
    // Emit the position-changed signal (whole seconds, deduplicated).
    uint32_t _pts = (uint32_t) audioPts;
    if (is->m_lastAudPts != _pts) {
        emit is->AVPtsChanged(_pts);
        is->m_lastAudPts = _pts;
    }
}

// Initialize the SDL audio device and the resample/sonic targets.
// Returns 1 on success, 0 on failure (no audio stream, SDL init/open error).
int AVPlayer::initSDL()
{
    // No audio stream: fail immediately; the caller gates on m_hasAudio.
    if (m_decoder->audioIndex() < 0)
        return 0;
    // Initialize only the audio subsystem, and only once.
    if (SDL_WasInit(SDL_INIT_AUDIO) == 0) {
        if (SDL_Init(SDL_INIT_AUDIO) != 0) {
            qDebug() << "SDL_Init failed";
            return 0;
        }
    }
    m_exit = 0;
    m_audioBufSize = 0;
    m_audioBufIndex = 0;
    m_lastAudPts = -1;
    m_audioCodecPar = m_decoder->audioCodecPar();
    m_audioIndex = m_decoder->audioIndex();
    m_fmtCtx = m_decoder->formatContext();
    // Performance: a small audio buffer reduces output latency.
    SDL_AudioSpec wanted_spec;
    wanted_spec.channels = ffmpeg_get_codec_channels(m_fmtCtx->streams[m_audioIndex]);
    wanted_spec.freq = m_audioCodecPar->sample_rate;
    wanted_spec.format = AUDIO_S16SYS;
    wanted_spec.silence = 0;
    wanted_spec.callback = fillAStreamCallback;
    wanted_spec.userdata = this;
    // Minimum sample count from the low-latency config.
    wanted_spec.samples = LowLatencyConfig::MIN_AUDIO_SAMPLES;
    if (SDL_OpenAudio(&wanted_spec, nullptr) < 0) {
        qDebug() << "SDL_OpenAudio failed";
        return 0;
    }
    // Resampling targets for the audio callback.
    m_targetSampleFmt = AV_SAMPLE_FMT_S16;
    m_targetChannels = ffmpeg_get_codec_channels(m_fmtCtx->streams[m_audioIndex]);
    m_targetFreq = m_audioCodecPar->sample_rate;
    m_targetChannelLayout = (int64_t) ffmpeg_get_default_channel_layout(m_targetChannels);
    m_targetNbSamples = m_audioCodecPar->frame_size;
    m_audioIndex = m_decoder->audioIndex();
    m_fmtCtx = m_decoder->formatContext();
    // Sonic stream used for speed change without pitch shift.
    m_sonicStream = sonicCreateStream(m_targetFreq, m_targetChannels);
    sonicSetQuality(m_sonicStream, 1);
    SDL_PauseAudio(0);
    return 1;
}

// Prepare video output (target pixel format, conversion buffers) and start the
// playback loop on the thread pool. Returns 1 on success.
int AVPlayer::initVideo()
{
    m_frameTimerUs = 0; // high-resolution microsecond frame timer
    m_videoCodecPar = m_decoder->videoCodecPar();
    m_videoIndex = m_decoder->videoIndex();
    m_fmtCtx = m_decoder->formatContext();
    m_imageWidth = m_videoCodecPar ? m_videoCodecPar->width : 0;
    m_imageHeight = m_videoCodecPar ? m_videoCodecPar->height : 0;
    m_dstPixFmt = AV_PIX_FMT_YUV422P;
    // Fast scaling algorithm to lower conversion latency.
    m_swsFlags = SWS_FAST_BILINEAR;
    // Allocate the buffer only when the resolution is already known; otherwise
    // defer allocation to the first decoded frame (see displayImage).
    if (m_imageWidth > 0 && m_imageHeight > 0) {
        int bufSize = av_image_get_buffer_size(m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
        m_buffer = (uint8_t*) av_realloc(m_buffer, bufSize * sizeof(uint8_t));
        av_image_fill_arrays(m_pixels, m_pitch, m_buffer, m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
    }
    // Queue the video playback loop as a thread-pool task.
    // NOTE(review): the make_shared template argument was lost in extraction;
    // <int> matches the literal 0 — verify against ThreadPool::addTask.
    if (!ThreadPool::addTask(std::bind(&AVPlayer::videoCallback, this, std::placeholders::_1),
                             std::make_shared<int>(0))) {
        qDebug() << "videoCallback add task failed!";
    }
    return 1;
}

// Pause or resume playback. Pausing snapshots every clock; resuming excludes
// the paused wall-clock span from the frame timer and re-anchors the clocks so
// the progress display does not jump.
void AVPlayer::pause(bool isPause)
{
    if (m_hasAudio) {
        if (SDL_GetAudioStatus() == SDL_AUDIO_STOPPED)
            return;
        if (isPause) {
            if (SDL_GetAudioStatus() == SDL_AUDIO_PLAYING) {
                SDL_PauseAudio(1);
                // Record pause start time and clock snapshots.
                int64_t pauseTimeUs = av_gettime_relative();
                m_pauseTimeUs = pauseTimeUs - (m_baseTimeUs ? m_baseTimeUs : pauseTimeUs);
                m_pauseAudClockUs = m_audioClock.getClock();
                m_pauseVidClockUs = m_videoClock.getClock();
                m_pauseExtClockUs = m_extClock.getClock();
                m_pause = 1;
            }
        } else {
            if (SDL_GetAudioStatus() == SDL_AUDIO_PAUSED) {
                SDL_PauseAudio(0);
                // Resume: exclude the paused duration and re-anchor every clock
                // to its pre-pause snapshot to avoid a progress jump.
                int64_t resumeTimeUs = av_gettime_relative();
                int64_t resumeElapsedUs = resumeTimeUs - (m_baseTimeUs ? m_baseTimeUs : resumeTimeUs);
                m_frameTimerUs += resumeElapsedUs - m_pauseTimeUs;
                m_audioClock.setClock(m_pauseAudClockUs);
                m_videoClock.setClock(m_pauseVidClockUs);
                m_extClock.setClock(m_pauseExtClockUs);
                m_pause = 0;
            }
        }
    } else if (m_hasVideo) {
        // Video-only: the playback thread is throttled via the m_pause flag.
        if (isPause) {
            if (!m_pause) {
                int64_t pauseTimeUs = av_gettime_relative();
                m_pauseTimeUs = pauseTimeUs - (m_baseTimeUs ? m_baseTimeUs : pauseTimeUs);
                // Snapshot the clocks at pause time.
                m_pauseVidClockUs = m_videoClock.getClock();
                m_pauseExtClockUs = m_extClock.getClock();
                m_pause = 1;
            }
        } else {
            if (m_pause) {
                int64_t resumeTimeUs = av_gettime_relative();
                int64_t resumeElapsedUs = resumeTimeUs - (m_baseTimeUs ? m_baseTimeUs : resumeTimeUs);
                m_frameTimerUs += resumeElapsedUs - m_pauseTimeUs;
                // Re-anchor video and external clocks.
                m_videoClock.setClock(m_pauseVidClockUs);
                m_extClock.setClock(m_pauseExtClockUs);
                m_pause = 0;
            }
        }
    }
}

// Stop playback and release per-session resources (SDL device, resampler,
// scaler, sonic stream). Safe to call when already stopped.
void AVPlayer::clearPlayer()
{
    if (playState() != AV_STOPPED) {
        m_exit = 1;
        if (m_hasAudio && playState() == AV_PLAYING)
            SDL_PauseAudio(1);
        m_decoder->exit();
        if (m_hasAudio)
            SDL_CloseAudio();
        if (m_swrCtx)
            swr_free(&m_swrCtx);
        if (m_swsCtx)
            sws_freeContext(m_swsCtx);
        m_swrCtx = nullptr;
        m_swsCtx = nullptr;
        if (m_sonicStream)
            sonicDestroyStream(m_sonicStream);
        m_sonicStream = nullptr;
    }
}

// Probe media information for `url` without starting playback.
AVTool::MediaInfo* AVPlayer::detectMediaInfo(const QString& url)
{
    return m_decoder->detectMediaInfo(url);
}

// Current playback state. With audio the SDL device status is authoritative;
// video-only playback derives the state from the exit/pause flags.
AVPlayer::PlayState AVPlayer::playState()
{
    if (m_hasAudio) {
        AVPlayer::PlayState state;
        switch (SDL_GetAudioStatus()) {
        case SDL_AUDIO_PLAYING:
            state = AVPlayer::AV_PLAYING;
            break;
        case SDL_AUDIO_PAUSED:
            state = AVPlayer::AV_PAUSED;
            break;
        case SDL_AUDIO_STOPPED:
            state = AVPlayer::AV_STOPPED;
            break;
        default:
            state = AVPlayer::AV_STOPPED;
            break;
        }
        return state;
    }
    if (m_hasVideo) {
        if (m_exit)
            return AV_STOPPED;
        if (m_pause)
            return AV_PAUSED;
        return AV_PLAYING;
    }
    return AV_STOPPED;
}

// Reset all clocks to 0 and the PTS-normalization baselines, so every start
// begins at 0 (external clock aligned with ffplay's "-sync ext" behavior).
void AVPlayer::initAVClock()
{
    m_audioClock.setClock(0);
    m_videoClock.setClock(0);
    m_extClock.setClock(0);
    m_audioStartPtsUs = -1;
    m_videoStartPtsUs = -1;
    m_clockInitFlag = 1;
}

// Convert `frame` to the target pixel format/size when needed, publish it via
// the frameChanged signal, and update the video clock from its PTS.
void AVPlayer::displayImage(AVFrame* frame)
{
    if (frame) {
        // First-frame fallback: when probing could not determine a resolution,
        // initialize from the first frame and allocate the buffers now.
        if (m_imageWidth <= 0 || m_imageHeight <= 0) {
            m_imageWidth = frame->width;
            m_imageHeight = frame->height;
            int bufSize = av_image_get_buffer_size(m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
            m_buffer = (uint8_t*) av_realloc(m_buffer, bufSize * sizeof(uint8_t));
            av_image_fill_arrays(m_pixels, m_pitch, m_buffer, m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
        }
        // Decide whether a pixel-format/resolution conversion is needed.
        bool needConvert = (frame->format != m_dstPixFmt)
            || (frame->width != m_imageWidth)
            || (frame->height != m_imageHeight);
        // NOTE(review): the QSharedPointer template argument was lost in
        // extraction; <VFrame> inferred from the "vframe.h" include — verify
        // against the frameChanged signal's declared type.
        if (needConvert) {
            m_swsCtx = sws_getCachedContext(m_swsCtx,
                frame->width, frame->height, (enum AVPixelFormat) frame->format,
                m_imageWidth, m_imageHeight, m_dstPixFmt,
                m_swsFlags, nullptr, nullptr, nullptr);
            if (m_swsCtx) {
                // Make sure the output buffer matches the current target size.
                int bufSize = av_image_get_buffer_size(m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
                m_buffer = (uint8_t*) av_realloc(m_buffer, bufSize * sizeof(uint8_t));
                av_image_fill_arrays(m_pixels, m_pitch, m_buffer, m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
                sws_scale(m_swsCtx, frame->data, frame->linesize, 0, frame->height, m_pixels, m_pitch);
                uint8_t* planes[4] = { m_pixels[0], m_pixels[1], m_pixels[2], m_pixels[3] };
                int lines[4] = { m_pitch[0], m_pitch[1], m_pitch[2], m_pitch[3] };
                emit frameChanged(QSharedPointer<VFrame>::create(m_dstPixFmt, m_imageWidth, m_imageHeight, planes, lines));
            } else {
                // Fallback: pass the frame through untouched.
                emit frameChanged(QSharedPointer<VFrame>::create((AVPixelFormat) frame->format, frame->width, frame->height, frame->data, frame->linesize));
            }
        } else {
            // No conversion needed: pass the frame through untouched.
            emit frameChanged(QSharedPointer<VFrame>::create((AVPixelFormat) frame->format, frame->width, frame->height, frame->data, frame->linesize));
        }
        // Record the video clock: convert the PTS to microseconds and
        // normalize so the first frame maps to 0.
        double videoPtsSeconds = frame->pts * av_q2d(m_fmtCtx->streams[m_videoIndex]->time_base);
        int64_t videoPtsUs = static_cast<int64_t>(videoPtsSeconds * 1000000.0);
        if (m_videoStartPtsUs < 0) {
            m_videoStartPtsUs = videoPtsUs;
        }
        int64_t normVideoPtsUs = videoPtsUs - m_videoStartPtsUs;
        m_videoClock.setClock(normVideoPtsUs);
    }
}

// Video playback loop (runs as a thread-pool task): paces frames against the
// master clock, drops late frames, and publishes each frame via displayImage.
// NOTE(review): the shared_ptr template argument was lost in extraction;
// <void> must match the declaration in av_player.h — verify.
void AVPlayer::videoCallback(std::shared_ptr<void> par)
{
    double duration = 0.00;
    double delay = 0.00;
    if (m_clockInitFlag == -1) {
        initAVClock();
    }
    do {
        if (m_exit)
            break;
        if (m_pause) {
            std::this_thread::sleep_for(std::chrono::milliseconds(50));
            continue;
        }
        if (m_decoder->getRemainingVFrame()) {
            MyFrame* lastFrame = m_decoder->peekLastVFrame();
            MyFrame* frame = m_decoder->peekVFrame();
            // qDebug() << "video pts:" << frame->pts;
            // Stale frame from a previous seek/serial: skip it.
            if (frame->serial != m_decoder->vidPktSerial()) {
                m_decoder->setNextVFrame();
                continue;
            }
            // Serial changed (seek): reset the frame timer from the wall clock.
            if (frame->serial != lastFrame->serial) {
                int64_t currentTimeUs = av_gettime_relative();
                m_frameTimerUs = currentTimeUs - m_baseTimeUs;
            }
            duration = vpDuration(lastFrame, frame);
            delay = computeTargetDelay(duration);
            // Current time on the high-resolution clock (microseconds).
            int64_t currentTimeUs = av_gettime_relative();
            int64_t timeUs = currentTimeUs - m_baseTimeUs;
            // Performance monitoring: detect delay accumulation periodically.
            m_performanceFrameCount++;
            if (m_performanceFrameCount % LowLatencyConfig::DELAY_MONITOR_INTERVAL == 0) {
                if (delay > m_lastDelayValue + LowLatencyConfig::DELAY_ACCUMULATION_THRESHOLD) {
                    // Accumulation detected: scale the delay back down.
                    delay *= LowLatencyConfig::DELAY_RESET_FACTOR;
                    qDebug() << "Delay accumulation detected, correcting delay from" << m_lastDelayValue << "to" << delay;
                }
                m_lastDelayValue = delay;
            }
            // qDebug() << "delay:" << delay;
            int64_t delayUs = static_cast<int64_t>(delay * 1000000.0);
            if (timeUs < m_frameTimerUs + delayUs) {
                // Not yet time to show this frame: sleep, but cap the sleep so
                // we re-evaluate the clocks regularly.
                int64_t sleepTimeUs = m_frameTimerUs + delayUs - timeUs;
                int64_t maxSleepUs = LowLatencyConfig::BALANCED_SYNC_REJUDGE_THRESHOLD_US;
                if (sleepTimeUs > maxSleepUs) {
                    sleepTimeUs = maxSleepUs;
                }
                QThread::msleep(static_cast<unsigned long>(sleepTimeUs / 1000));
                continue;
            }
            m_frameTimerUs += delayUs;
            int64_t maxThresholdUs = LowLatencyConfig::BALANCED_SYNC_THRESHOLD_MAX_US;
            // FIX(review): snapshot the drift BEFORE resetting the frame timer;
            // the original compared timeUs - m_frameTimerUs after the reset,
            // which is always 0, so the counter reset below was dead code.
            int64_t driftUs = timeUs - m_frameTimerUs;
            if (driftUs > maxThresholdUs) {
                m_frameTimerUs = timeUs;
                // Reset the performance counter on a large timer correction.
                int64_t correctionThresholdUs = LowLatencyConfig::FRAME_TIMER_CORRECTION_THRESHOLD_US;
                if (driftUs > correctionThresholdUs) {
                    m_performanceFrameCount = 0;
                }
            }
            // Logical frame-drop check when more than one undisplayed frame is
            // queued. Speed-change and frame-step playback skip this branch:
            // at higher speeds it drops too many frames and the UI stutters.
            // Balanced mode: gentle dropping for stability.
            if (m_playSpeed == 1.0 && m_decoder->getRemainingVFrame() > 1) { // > 1 is the more stable threshold
                MyFrame* nextFrame = m_decoder->peekNextVFrame();
                if (nextFrame) {
                    duration = nextFrame->pts - frame->pts;
                    // If the master clock already passed the span this frame
                    // should stay on screen, drop it immediately.
                    int64_t durationUs = static_cast<int64_t>(duration * 1000000.0);
                    if (timeUs > m_frameTimerUs + durationUs) {
                        m_decoder->setNextVFrame();
                        qDebug() << "abandon vframe (balanced mode)" << Qt::endl;
                        continue;
                    }
                }
                // Gentle delay-based dropping, threshold from the config file.
                if (delay > LowLatencyConfig::FRAME_DROP_THRESHOLD) {
                    m_decoder->setNextVFrame();
                    qDebug() << "drop frame due to high delay:" << delay << Qt::endl;
                    continue;
                }
            }
            displayImage(&frame->frame);
            // Without audio, drive the external progress from the video clock.
            if (!m_hasAudio) {
                uint32_t _pts = (uint32_t) (m_videoClock.getClock() / 1000000);
                if (m_lastAudPts != _pts) {
                    emit AVPtsChanged(_pts);
                    m_lastAudPts = _pts;
                }
            }
            // Advance the read index.
            m_decoder->setNextVFrame();
        } else {
            QThread::msleep(10);
        }
    } while (true);
    // qDebug() << "videoCallBack exit";
    if (m_decoder->isExit()) {
        emit AVTerminate();
    }
}

// Adjust the nominal frame delay (seconds) so video tracks the master clock
// (audio, or the external clock as fallback). With SYNC_VIDEO the video is the
// master and no adjustment is made.
double AVPlayer::computeTargetDelay(double delay)
{
    // Video is the master clock: play at the video's own pace.
    if (m_syncType == SYNC_VIDEO) {
        return delay;
    }
    // All computations below are in microseconds.
    const int64_t videoUs = m_videoClock.getClock();
    // Choose the master clock: audio or external (fall back when missing).
    int64_t masterUs = 0;
    if (m_syncType == SYNC_AUDIO) {
        if (m_hasAudio) {
            masterUs = m_audioClock.getClock();
        } else {
            // No audio: fall back to the external clock.
            masterUs = m_extClock.getClock();
        }
    } else {
        // SYNC_EXTERNAL (and anything else): use the external clock.
        masterUs = m_extClock.getClock();
    }
    // Positive: video ahead of the master; negative: video behind.
    const int64_t diffUs = videoUs - masterUs;
    // Convert the per-frame delay to microseconds and clamp it between the
    // minimum and maximum sync thresholds.
    int64_t delayUs = static_cast<int64_t>(delay * 1000000.0);
    int64_t syncUs = FFMAX(LowLatencyConfig::BALANCED_SYNC_THRESHOLD_MIN_US,
        FFMIN(LowLatencyConfig::BALANCED_SYNC_THRESHOLD_MAX_US, delayUs));
    // "Give up syncing" threshold, in microseconds.
    const int64_t noSyncUs = static_cast<int64_t>(LowLatencyConfig::BALANCED_NOSYNC_THRESHOLD * 1000000.0);
    const int64_t frameDupUs = static_cast<int64_t>(LowLatencyConfig::BALANCED_SYNC_FRAMEDUP_THRESHOLD * 1000000.0);
    if (qAbs(diffUs) < noSyncUs) {
        if (diffUs <= -syncUs) {
            // Video behind the master: catch up as fast as possible.
            const int64_t newDelayUs = FFMAX(0LL, diffUs + delayUs);
            delay = static_cast<double>(newDelayUs) / 1000000.0;
        } else if (diffUs >= syncUs && delayUs > frameDupUs) {
            // Video ahead and the frame is long: wait one frame plus the lead.
            const int64_t newDelayUs = diffUs + delayUs;
            delay = static_cast<double>(newDelayUs) / 1000000.0;
        } else if (diffUs >= syncUs) {
            // High-frame-rate case: wait two frame durations.
            delay = 2.0 * delay;
        }
    }
    return delay;
}

// Nominal on-screen duration (seconds) of `lastFrame`, derived from the PTS
// gap to `curFrame`. Falls back to the frame's own duration on NaN/absurd
// gaps, and returns 0 across a serial (seek) boundary.
double AVPlayer::vpDuration(MyFrame* lastFrame, MyFrame* curFrame)
{
    if (curFrame->serial == lastFrame->serial) {
        double duration = curFrame->pts - lastFrame->pts;
        if (isnan(duration) || duration > LowLatencyConfig::BALANCED_NOSYNC_THRESHOLD)
            return lastFrame->duration;
        else
            return duration;
    } else {
        return 0.00;
    }
}