|
|
@@ -30,8 +30,13 @@ AVPlayer::AVPlayer()
|
|
|
, m_exit(0)
|
|
|
, m_pause(0)
|
|
|
, m_playSpeed(1.0)
|
|
|
+ , m_baseTimeUs(0)
|
|
|
+ , m_performanceFrameCount(0)
|
|
|
+ , m_lastDelayValue(0.0)
|
|
|
{
|
|
|
m_sonicStream = nullptr;
|
|
|
+ // 初始化高精度时间基准
|
|
|
+ m_baseTimeUs = av_gettime_relative();
|
|
|
}
|
|
|
|
|
|
AVPlayer::~AVPlayer()
|
|
|
@@ -207,8 +212,9 @@ void fillAStreamCallback(void* userdata, uint8_t* stream, int len)
|
|
|
is->m_audioBufIndex += len1;
|
|
|
stream += len1;
|
|
|
}
|
|
|
- //记录音频时钟
|
|
|
- is->m_audioClock.setClock(audioPts);
|
|
|
+ //记录音频时钟,转换为微秒时间戳
|
|
|
+ int64_t audioPtsUs = static_cast<int64_t>(audioPts * 1000000.0);
|
|
|
+ is->m_audioClock.setClock(audioPtsUs);
|
|
|
//发送时间戳变化信号,因为进度以整数秒单位变化展示,
|
|
|
//所以大于一秒才发送,避免过于频繁的信号槽通信消耗性能
|
|
|
uint32_t _pts = (uint32_t) audioPts;
|
|
|
@@ -224,9 +230,12 @@ int AVPlayer::initSDL()
|
|
|
if (m_decoder->audioIndex() < 0)
|
|
|
return 0;
|
|
|
|
|
|
- if (SDL_Init(SDL_INIT_AUDIO) != 0) {
|
|
|
- qDebug() << "SDL_Init failed";
|
|
|
- return 0;
|
|
|
+ // 性能优化:使用更快的SDL初始化方式
|
|
|
+ if (SDL_WasInit(SDL_INIT_AUDIO) == 0) {
|
|
|
+ if (SDL_Init(SDL_INIT_AUDIO) != 0) {
|
|
|
+ qDebug() << "SDL_Init failed";
|
|
|
+ return 0;
|
|
|
+ }
|
|
|
}
|
|
|
|
|
|
m_exit = 0;
|
|
|
@@ -240,24 +249,22 @@ int AVPlayer::initSDL()
|
|
|
m_audioIndex = m_decoder->audioIndex();
|
|
|
m_fmtCtx = m_decoder->formatContext();
|
|
|
|
|
|
- // 音频设备配置(平衡模式 - 类似VLC)
|
|
|
+ // 性能优化:使用更小的音频缓冲区减少延迟
|
|
|
SDL_AudioSpec wanted_spec;
|
|
|
- // wanted_spec.channels = m_audioCodecPar->channels;
|
|
|
wanted_spec.channels = ffmpeg_get_codec_channels(m_fmtCtx->streams[m_audioIndex]);
|
|
|
wanted_spec.freq = m_audioCodecPar->sample_rate;
|
|
|
wanted_spec.format = AUDIO_S16SYS;
|
|
|
wanted_spec.silence = 0;
|
|
|
wanted_spec.callback = fillAStreamCallback;
|
|
|
wanted_spec.userdata = this;
|
|
|
- // 平衡音频回调缓冲区,兼顾延迟与稳定性(需为2的幂)
|
|
|
- wanted_spec.samples = LowLatencyConfig::BALANCED_AUDIO_SAMPLES; // 使用配置文件中的音频样本数
|
|
|
+ // 使用配置文件中的音频样本数减少延迟
|
|
|
+ wanted_spec.samples = LowLatencyConfig::MIN_AUDIO_SAMPLES; // 使用配置文件中的最小音频样本数
|
|
|
|
|
|
if (SDL_OpenAudio(&wanted_spec, nullptr) < 0) {
|
|
|
qDebug() << "SDL_OpenAudio failed";
|
|
|
return 0;
|
|
|
}
|
|
|
m_targetSampleFmt = AV_SAMPLE_FMT_S16;
|
|
|
- // m_targetChannels = m_audioCodecPar->channels;
|
|
|
m_targetChannels = ffmpeg_get_codec_channels(m_fmtCtx->streams[m_audioIndex]);
|
|
|
m_targetFreq = m_audioCodecPar->sample_rate;
|
|
|
m_targetChannelLayout = (int64_t)ffmpeg_get_default_channel_layout(m_targetChannels);
|
|
|
@@ -274,7 +281,7 @@ int AVPlayer::initSDL()
|
|
|
|
|
|
int AVPlayer::initVideo()
|
|
|
{
|
|
|
- m_frameTimer = 0.00;
|
|
|
+ m_frameTimerUs = 0; // 使用高精度微秒时间戳
|
|
|
|
|
|
m_videoCodecPar = m_decoder->videoCodecPar();
|
|
|
m_videoIndex = m_decoder->videoIndex();
|
|
|
@@ -310,27 +317,36 @@ void AVPlayer::pause(bool isPause)
|
|
|
if (isPause) {
|
|
|
if (SDL_GetAudioStatus() == SDL_AUDIO_PLAYING) {
|
|
|
SDL_PauseAudio(1);
|
|
|
- m_pauseTime = av_gettime_relative() / 1000000.0;
|
|
|
+ // 优化:直接使用高精度时间戳,避免除法运算
|
|
|
+ int64_t pauseTimeUs = av_gettime_relative();
|
|
|
+ m_pauseTimeUs = pauseTimeUs - (m_baseTimeUs ? m_baseTimeUs : pauseTimeUs);
|
|
|
m_pause = 1;
|
|
|
}
|
|
|
} else {
|
|
|
if (SDL_GetAudioStatus() == SDL_AUDIO_PAUSED) {
|
|
|
SDL_PauseAudio(0);
|
|
|
- m_frameTimer += av_gettime_relative() / 1000000.0 - m_pauseTime;
|
|
|
+ // 优化:直接使用高精度时间戳计算暂停时长,避免除法运算
|
|
|
+ int64_t resumeTimeUs = av_gettime_relative();
|
|
|
+ int64_t resumeElapsedUs = resumeTimeUs - (m_baseTimeUs ? m_baseTimeUs : resumeTimeUs);
|
|
|
+ m_frameTimerUs += resumeElapsedUs - m_pauseTimeUs;
|
|
|
m_pause = 0;
|
|
|
}
|
|
|
}
|
|
|
} else if (m_hasVideo) {
|
|
|
// 仅视频:通过标志控制回放线程
|
|
|
- double now = av_gettime_relative() / 1000000.0;
|
|
|
if (isPause) {
|
|
|
if (!m_pause) {
|
|
|
- m_pauseTime = now;
|
|
|
+ // 优化:直接使用高精度时间戳,避免除法运算
|
|
|
+ int64_t pauseTimeUs = av_gettime_relative();
|
|
|
+ m_pauseTimeUs = pauseTimeUs - (m_baseTimeUs ? m_baseTimeUs : pauseTimeUs);
|
|
|
m_pause = 1;
|
|
|
}
|
|
|
} else {
|
|
|
if (m_pause) {
|
|
|
- m_frameTimer += now - m_pauseTime;
|
|
|
+ // 优化:直接使用高精度时间戳计算暂停时长,避免除法运算
|
|
|
+ int64_t resumeTimeUs = av_gettime_relative();
|
|
|
+ int64_t resumeElapsedUs = resumeTimeUs - (m_baseTimeUs ? m_baseTimeUs : resumeTimeUs);
|
|
|
+ m_frameTimerUs += resumeElapsedUs - m_pauseTimeUs;
|
|
|
m_pause = 0;
|
|
|
}
|
|
|
}
|
|
|
@@ -397,8 +413,8 @@ AVPlayer::PlayState AVPlayer::playState()
|
|
|
|
|
|
void AVPlayer::initAVClock()
|
|
|
{
|
|
|
- m_audioClock.setClock(0.00);
|
|
|
- m_videoClock.setClock(0.00);
|
|
|
+ m_audioClock.setClock(0);
|
|
|
+ m_videoClock.setClock(0);
|
|
|
m_clockInitFlag = 1;
|
|
|
}
|
|
|
|
|
|
@@ -466,8 +482,10 @@ void AVPlayer::displayImage(AVFrame* frame)
|
|
|
frame->linesize));
|
|
|
}
|
|
|
|
|
|
- //记录视频时钟
|
|
|
- m_videoClock.setClock(frame->pts * av_q2d(m_fmtCtx->streams[m_videoIndex]->time_base));
|
|
|
+ //记录视频时钟,转换为微秒时间戳
|
|
|
+ double videoPtsSeconds = frame->pts * av_q2d(m_fmtCtx->streams[m_videoIndex]->time_base);
|
|
|
+ int64_t videoPtsUs = static_cast<int64_t>(videoPtsSeconds * 1000000.0);
|
|
|
+ m_videoClock.setClock(videoPtsUs);
|
|
|
}
|
|
|
}
|
|
|
|
|
|
@@ -497,26 +515,55 @@ void AVPlayer::videoCallback(std::shared_ptr<void> par)
|
|
|
continue;
|
|
|
}
|
|
|
|
|
|
- if (frame->serial != lastFrame->serial)
|
|
|
- m_frameTimer = av_gettime_relative() / 1000000.0;
|
|
|
+ if (frame->serial != lastFrame->serial) {
|
|
|
+ // 优化:直接使用高精度时间戳重置帧定时器,避免除法运算
|
|
|
+ int64_t currentTimeUs = av_gettime_relative();
|
|
|
+ m_frameTimerUs = currentTimeUs - m_baseTimeUs;
|
|
|
+ }
|
|
|
|
|
|
duration = vpDuration(lastFrame, frame);
|
|
|
delay = computeTargetDelay(duration);
|
|
|
|
|
|
- time = av_gettime_relative() / 1000000.0;
|
|
|
+ // 优化:直接使用高精度时间戳计算当前时间,避免除法运算
|
|
|
+ int64_t currentTimeUs = av_gettime_relative();
|
|
|
+ int64_t timeUs = currentTimeUs - m_baseTimeUs;
|
|
|
+
|
|
|
+ // 性能监控:检测延迟累积
|
|
|
+ m_performanceFrameCount++;
|
|
|
+ if (m_performanceFrameCount % LowLatencyConfig::DELAY_MONITOR_INTERVAL == 0) {
|
|
|
+            if (delay > m_lastDelayValue + LowLatencyConfig::DELAY_ACCUMULATION_THRESHOLD) {
|
|
|
+                double uncorrectedDelay = delay; // keep the pre-correction value for the log
|
|
|
+                delay *= LowLatencyConfig::DELAY_RESET_FACTOR;
|
|
|
+                qDebug() << "Delay accumulation detected, correcting delay from" << uncorrectedDelay << "to" << delay;
|
|
|
+ }
|
|
|
+ m_lastDelayValue = delay;
|
|
|
+ }
|
|
|
|
|
|
//qDebug()<<"delay:"<<delay<<endl;
|
|
|
|
|
|
//显示时长未到
|
|
|
- if (time < m_frameTimer + delay) {
|
|
|
- QThread::msleep(
|
|
|
- (uint32_t) (FFMIN(LowLatencyConfig::BALANCED_SYNC_REJUDGE_THRESHOLD, m_frameTimer + delay - time) * 1000));
|
|
|
+ int64_t delayUs = static_cast<int64_t>(delay * 1000000.0);
|
|
|
+ if (timeUs < m_frameTimerUs + delayUs) {
|
|
|
+ // 优化:使用更精确的睡眠时间计算
|
|
|
+ int64_t sleepTimeUs = m_frameTimerUs + delayUs - timeUs;
|
|
|
+ int64_t maxSleepUs = LowLatencyConfig::BALANCED_SYNC_REJUDGE_THRESHOLD_US;
|
|
|
+ if (sleepTimeUs > maxSleepUs) {
|
|
|
+ sleepTimeUs = maxSleepUs;
|
|
|
+ }
|
|
|
+ QThread::msleep(static_cast<uint32_t>(sleepTimeUs / 1000));
|
|
|
continue;
|
|
|
}
|
|
|
|
|
|
- m_frameTimer += delay;
|
|
|
- if (time - m_frameTimer > LowLatencyConfig::BALANCED_SYNC_THRESHOLD_MAX)
|
|
|
- m_frameTimer = time;
|
|
|
+    m_frameTimerUs += delayUs;
|
|
|
+    int64_t maxThresholdUs = LowLatencyConfig::BALANCED_SYNC_THRESHOLD_MAX_US;
|
|
|
+    int64_t frameDriftUs = timeUs - m_frameTimerUs;
|
|
|
+    if (frameDriftUs > maxThresholdUs) {
|
|
|
+        // Inspect drift BEFORE snapping the timer: the previous post-reset check was always false.
|
|
|
+        if (frameDriftUs > LowLatencyConfig::FRAME_TIMER_CORRECTION_THRESHOLD_US) {
|
|
|
+            m_performanceFrameCount = 0;
|
|
|
+        }
|
|
|
+        m_frameTimerUs = timeUs;
|
|
|
+    }
|
|
|
|
|
|
//队列中未显示帧一帧以上执行逻辑丢帧判断,倍速播放和逐帧播放
|
|
|
//都不跑进此逻辑,倍速易造成丢帧过多导致界面不流畅
|
|
|
@@ -527,7 +574,8 @@ void AVPlayer::videoCallback(std::shared_ptr<void> par)
|
|
|
duration = nextFrame->pts - frame->pts;
|
|
|
//若主时钟超前到大于当前帧理论显示应持续的时间了,则当前帧立即丢弃
|
|
|
// 平衡:使用原始duration阈值,避免过度丢帧
|
|
|
- if (time > m_frameTimer + duration) {
|
|
|
+ int64_t durationUs = static_cast<int64_t>(duration * 1000000.0);
|
|
|
+ if (timeUs > m_frameTimerUs + durationUs) {
|
|
|
m_decoder->setNextVFrame();
|
|
|
qDebug() << "abandon vframe (balanced mode)" << Qt::endl;
|
|
|
continue;
|