av_player.cpp 23 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607
  1. #include "av_player.h"
  2. #include <QDebug>
  3. #include <QImage>
  4. #include <QThread>
  5. #include "threadpool.h"
  6. #include "vframe.h"
  7. #include "low_latency_config.h"
// Sync threshold definitions have been moved to the config file (low_latency_config.h).
// Upper bound on the duration of a single video frame, used to adapt sync at low
// frame rates: when the frame rate is very low, doubling the delay for a video
// frame that runs ahead is unsuitable and needs special handling; the cap here
// corresponds to 10 frames per second.
// There is also an upper "give up" threshold beyond which syncing is pointless.
  13. AVPlayer::AVPlayer()
  14. : m_decoder(new Decoder)
  15. , m_fmtCtx(nullptr)
  16. , m_audioFrame(av_frame_alloc())
  17. , m_imageWidth(300)
  18. , m_imageHeight(300)
  19. , m_swrCtx(nullptr)
  20. , m_swsCtx(nullptr)
  21. , m_buffer(nullptr)
  22. , m_audioBuf(nullptr)
  23. , m_duration(0)
  24. , m_volume(30)
  25. , m_exit(0)
  26. , m_pause(0)
  27. , m_playSpeed(1.0)
  28. {
  29. m_sonicStream = nullptr;
  30. }
  31. AVPlayer::~AVPlayer()
  32. {
  33. av_frame_free(&m_audioFrame);
  34. clearPlayer();
  35. delete m_decoder;
  36. if (m_swrCtx)
  37. swr_free(&m_swrCtx);
  38. if (m_swsCtx)
  39. sws_freeContext(m_swsCtx);
  40. if (m_audioBuf)
  41. av_free(m_audioBuf);
  42. if (m_buffer)
  43. av_free(m_buffer);
  44. }
  45. int AVPlayer::play(const QString& url)
  46. {
  47. clearPlayer();
  48. if (!m_decoder->decode(url)) {
  49. qDebug() << "decode failed";
  50. return 0;
  51. }
  52. //解码成功可获取流时长
  53. m_duration = m_decoder->avDuration();
  54. emit AVDurationChanged(m_duration);
  55. m_pause = 0;
  56. m_clockInitFlag = -1;
  57. // 判断是否存在音/视频流
  58. m_audioIndex = m_decoder->audioIndex();
  59. m_videoIndex = m_decoder->videoIndex();
  60. m_hasAudio = (m_audioIndex >= 0);
  61. m_hasVideo = (m_videoIndex >= 0);
  62. bool ok = false;
  63. if (m_hasAudio) {
  64. if (initSDL()) {
  65. ok = true;
  66. } else {
  67. qDebug() << "init sdl failed!";
  68. }
  69. }
  70. if (m_hasVideo) {
  71. if (initVideo()) {
  72. ok = true;
  73. } else {
  74. qDebug() << "init video failed!";
  75. }
  76. }
  77. // 仅音频时,主动初始化时钟
  78. if (!m_hasVideo && m_hasAudio) {
  79. initAVClock();
  80. }
  81. return ok ? 1 : 0;
  82. }
// SDL audio callback: fill `stream` with up to `len` bytes of mixed audio.
// Pulls decoded frames from the decoder, resamples them to the SDL target
// format (and time-stretches via sonic when m_playSpeed != 1.0) into
// is->m_audioBuf, then mixes slices of that buffer into `stream`.
// Finally updates the audio clock and emits the coarse (whole-second)
// progress signal.
// NOTE(review): `audioPts` is a function-local static, so it is shared by
// every AVPlayer instance and persists across play() calls — fine while a
// single player exists; confirm before instantiating more than one.
void fillAStreamCallback(void* userdata, uint8_t* stream, int len)
{
memset(stream, 0, len);
AVPlayer* is = (AVPlayer*) userdata;
static double audioPts = 0.00;
while (len > 0) {
if (is->m_exit)
return;
if (is->m_audioBufIndex >= is->m_audioBufSize) { /* buffer fully consumed: refill it */
int ret = is->m_decoder->getAFrame(is->m_audioFrame);
if (ret) {
is->m_audioBufIndex = 0;
// Lazily create the resampler the first time the decoded frame's
// format/layout/rate/sample-count deviates from the SDL target.
if ((is->m_targetSampleFmt != is->m_audioFrame->format
|| is->m_targetChannelLayout != (int64_t)ffmpeg_get_frame_channel_layout(is->m_audioFrame)
|| is->m_targetFreq != is->m_audioFrame->sample_rate
|| is->m_targetNbSamples != is->m_audioFrame->nb_samples)
&& !is->m_swrCtx) {
is->m_swrCtx = ffmpeg_swr_alloc_set_opts(nullptr,
is->m_targetChannelLayout,
is->m_targetSampleFmt,
is->m_targetFreq,
(int64_t)ffmpeg_get_frame_channel_layout(is->m_audioFrame),
(enum AVSampleFormat) is->m_audioFrame->format,
is->m_audioFrame->sample_rate,
0,
nullptr);
if (!is->m_swrCtx || swr_init(is->m_swrCtx) < 0) {
qDebug() << "swr_init failed";
return;
}
}
if (is->m_swrCtx) {
// Resample path: convert the frame into m_audioBuf.
const uint8_t** in = (const uint8_t**) is->m_audioFrame->extended_data;
// Output sample estimate with 256-sample headroom (swr may buffer).
int out_count = (uint64_t) is->m_audioFrame->nb_samples * is->m_targetFreq
/ is->m_audioFrame->sample_rate
+ 256;
int out_size = ffmpeg_get_buffer_size(is->m_targetSampleFmt,
is->m_targetChannels,
out_count,
0);
if (out_size < 0) {
qDebug() << "av_samples_get_buffer_size failed";
return;
}
av_fast_malloc(&is->m_audioBuf, &is->m_audioBufSize, out_size);
if (!is->m_audioBuf) {
qDebug() << "av_fast_malloc failed";
return;
}
int len2 = swr_convert(is->m_swrCtx,
&is->m_audioBuf,
out_count,
in,
is->m_audioFrame->nb_samples);
if (len2 < 0) {
qDebug() << "swr_convert failed";
return;
}
// Variable speed: push the converted samples through sonic and
// read back the time-stretched result in place.
if (is->m_playSpeed != 1.0) {
sonicSetSpeed(is->m_sonicStream, is->m_playSpeed);
int ret = sonicWriteShortToStream(is->m_sonicStream,
(short*) is->m_audioBuf,
len2);
int availSamples = sonicSamplesAvailable(is->m_sonicStream);
if (!availSamples) {
// sonic has buffered everything: mark our buffer consumed
// and loop to decode the next frame.
is->m_audioBufSize = is->m_audioBufIndex;
continue;
}
int numSamples = availSamples;
int bytes = numSamples * is->m_targetChannels
* av_get_bytes_per_sample(is->m_targetSampleFmt);
if (bytes > out_size) {
av_fast_malloc(&is->m_audioBuf, &is->m_audioBufSize, bytes);
}
len2 = sonicReadShortFromStream(is->m_sonicStream,
(short*) is->m_audioBuf,
numSamples);
}
is->m_audioBufSize = len2 * is->m_targetChannels
* av_get_bytes_per_sample(is->m_targetSampleFmt);
} else {
// Formats already match: copy the frame data straight through.
is->m_audioBufSize = ffmpeg_get_buffer_size(is->m_targetSampleFmt,
is->m_targetChannels,
is->m_audioFrame->nb_samples,
0);
av_fast_malloc(&is->m_audioBuf, &is->m_audioBufSize, is->m_audioBufSize + 256);
if (!is->m_audioBuf) {
qDebug() << "av_fast_malloc failed";
return;
}
memcpy(is->m_audioBuf, is->m_audioFrame->data[0], is->m_audioBufSize);
}
// Convert the frame pts to seconds using the stream time base.
audioPts = is->m_audioFrame->pts
* av_q2d(is->m_fmtCtx->streams[is->m_audioIndex]->time_base);
//qDebug()<<is->m_audioPts;
av_frame_unref(is->m_audioFrame);
} else {
// No frame available: check whether we truly reached end of file.
if (is->m_decoder->isExit()) {
emit is->AVTerminate();
}
return;
}
}
// Mix the next slice of m_audioBuf into the SDL stream at m_volume.
int len1 = is->m_audioBufSize - is->m_audioBufIndex;
len1 = (len1 > len ? len : len1);
SDL_MixAudio(stream, is->m_audioBuf + is->m_audioBufIndex, len1, is->m_volume);
len -= len1;
is->m_audioBufIndex += len1;
stream += len1;
}
// Record the audio clock.
is->m_audioClock.setClock(audioPts);
// Progress is displayed in whole seconds, so only emit when the integer
// second changes, avoiding excessive signal/slot traffic.
uint32_t _pts = (uint32_t) audioPts;
if (is->m_lastAudPts != _pts) {
emit is->AVPtsChanged(_pts);
is->m_lastAudPts = _pts;
}
}
  204. int AVPlayer::initSDL()
  205. {
  206. // 无音频流直接返回失败,调用方根据 m_hasAudio 控制
  207. if (m_decoder->audioIndex() < 0)
  208. return 0;
  209. if (SDL_Init(SDL_INIT_AUDIO) != 0) {
  210. qDebug() << "SDL_Init failed";
  211. return 0;
  212. }
  213. m_exit = 0;
  214. m_audioBufSize = 0;
  215. m_audioBufIndex = 0;
  216. m_lastAudPts = -1;
  217. m_audioCodecPar = m_decoder->audioCodecPar();
  218. m_audioIndex = m_decoder->audioIndex();
  219. m_fmtCtx = m_decoder->formatContext();
  220. // 音频设备配置(平衡模式 - 类似VLC)
  221. SDL_AudioSpec wanted_spec;
  222. // wanted_spec.channels = m_audioCodecPar->channels;
  223. wanted_spec.channels = ffmpeg_get_codec_channels(m_fmtCtx->streams[m_audioIndex]);
  224. wanted_spec.freq = m_audioCodecPar->sample_rate;
  225. wanted_spec.format = AUDIO_S16SYS;
  226. wanted_spec.silence = 0;
  227. wanted_spec.callback = fillAStreamCallback;
  228. wanted_spec.userdata = this;
  229. // 平衡音频回调缓冲区,兼顾延迟与稳定性(需为2的幂)
  230. wanted_spec.samples = LowLatencyConfig::BALANCED_AUDIO_SAMPLES; // 使用配置文件中的音频样本数
  231. if (SDL_OpenAudio(&wanted_spec, nullptr) < 0) {
  232. qDebug() << "SDL_OpenAudio failed";
  233. return 0;
  234. }
  235. m_targetSampleFmt = AV_SAMPLE_FMT_S16;
  236. // m_targetChannels = m_audioCodecPar->channels;
  237. m_targetChannels = ffmpeg_get_codec_channels(m_fmtCtx->streams[m_audioIndex]);
  238. m_targetFreq = m_audioCodecPar->sample_rate;
  239. m_targetChannelLayout = (int64_t)ffmpeg_get_default_channel_layout(m_targetChannels);
  240. m_targetNbSamples = m_audioCodecPar->frame_size;
  241. m_audioIndex = m_decoder->audioIndex();
  242. m_fmtCtx = m_decoder->formatContext();
  243. m_sonicStream = sonicCreateStream(m_targetFreq, m_targetChannels);
  244. sonicSetQuality(m_sonicStream, 1);
  245. SDL_PauseAudio(0);
  246. return 1;
  247. }
  248. int AVPlayer::initVideo()
  249. {
  250. m_frameTimer = 0.00;
  251. m_videoCodecPar = m_decoder->videoCodecPar();
  252. m_videoIndex = m_decoder->videoIndex();
  253. m_fmtCtx = m_decoder->formatContext();
  254. m_imageWidth = m_videoCodecPar ? m_videoCodecPar->width : 0;
  255. m_imageHeight = m_videoCodecPar ? m_videoCodecPar->height : 0;
  256. m_dstPixFmt = AV_PIX_FMT_YUV422P;
  257. // 改用快速缩放算法以降低转换时延
  258. m_swsFlags = SWS_FAST_BILINEAR;
  259. // 仅当分辨率有效时分配缓存,否则延迟到首帧分配
  260. if (m_imageWidth > 0 && m_imageHeight > 0) {
  261. int bufSize = av_image_get_buffer_size(m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
  262. m_buffer = (uint8_t*) av_realloc(m_buffer, bufSize * sizeof(uint8_t));
  263. av_image_fill_arrays(m_pixels, m_pitch, m_buffer, m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
  264. }
  265. //视频帧播放回调递插入线程池任务队列
  266. if (!ThreadPool::addTask(std::bind(&AVPlayer::videoCallback, this, std::placeholders::_1),
  267. std::make_shared<int>(0))) {
  268. qDebug() << "videoCallback add task failed!";
  269. }
  270. return 1;
  271. }
  272. void AVPlayer::pause(bool isPause)
  273. {
  274. if (m_hasAudio) {
  275. if (SDL_GetAudioStatus() == SDL_AUDIO_STOPPED)
  276. return;
  277. if (isPause) {
  278. if (SDL_GetAudioStatus() == SDL_AUDIO_PLAYING) {
  279. SDL_PauseAudio(1);
  280. m_pauseTime = av_gettime_relative() / 1000000.0;
  281. m_pause = 1;
  282. }
  283. } else {
  284. if (SDL_GetAudioStatus() == SDL_AUDIO_PAUSED) {
  285. SDL_PauseAudio(0);
  286. m_frameTimer += av_gettime_relative() / 1000000.0 - m_pauseTime;
  287. m_pause = 0;
  288. }
  289. }
  290. } else if (m_hasVideo) {
  291. // 仅视频:通过标志控制回放线程
  292. double now = av_gettime_relative() / 1000000.0;
  293. if (isPause) {
  294. if (!m_pause) {
  295. m_pauseTime = now;
  296. m_pause = 1;
  297. }
  298. } else {
  299. if (m_pause) {
  300. m_frameTimer += now - m_pauseTime;
  301. m_pause = 0;
  302. }
  303. }
  304. }
  305. }
// Stop playback and tear down per-stream resources so a new play() (or the
// destructor) starts from a clean slate. Ordering matters: the SDL audio
// callback and the decoder are stopped BEFORE the swr/sws contexts they
// use are freed.
void AVPlayer::clearPlayer()
{
if (playState() != AV_STOPPED) {
m_exit = 1; // signals the video thread and the audio callback to bail out
if (m_hasAudio && playState() == AV_PLAYING)
SDL_PauseAudio(1);
m_decoder->exit();
if (m_hasAudio)
SDL_CloseAudio();
if (m_swrCtx)
swr_free(&m_swrCtx);
if (m_swsCtx)
sws_freeContext(m_swsCtx);
m_swrCtx = nullptr;
m_swsCtx = nullptr;
if (m_sonicStream)
sonicDestroyStream(m_sonicStream);
m_sonicStream = nullptr;
}
}
  326. AVTool::MediaInfo* AVPlayer::detectMediaInfo(const QString& url)
  327. {
  328. return m_decoder->detectMediaInfo(url);
  329. }
  330. AVPlayer::PlayState AVPlayer::playState()
  331. {
  332. if (m_hasAudio) {
  333. AVPlayer::PlayState state;
  334. switch (SDL_GetAudioStatus()) {
  335. case SDL_AUDIO_PLAYING:
  336. state = AVPlayer::AV_PLAYING;
  337. break;
  338. case SDL_AUDIO_PAUSED:
  339. state = AVPlayer::AV_PAUSED;
  340. break;
  341. case SDL_AUDIO_STOPPED:
  342. state = AVPlayer::AV_STOPPED;
  343. break;
  344. default:
  345. state = AVPlayer::AV_STOPPED;
  346. break;
  347. }
  348. return state;
  349. }
  350. if (m_hasVideo) {
  351. if (m_exit)
  352. return AV_STOPPED;
  353. if (m_pause)
  354. return AV_PAUSED;
  355. return AV_PLAYING;
  356. }
  357. return AV_STOPPED;
  358. }
  359. void AVPlayer::initAVClock()
  360. {
  361. m_audioClock.setClock(0.00);
  362. m_videoClock.setClock(0.00);
  363. m_clockInitFlag = 1;
  364. }
  365. void AVPlayer::displayImage(AVFrame* frame)
  366. {
  367. if (frame) {
  368. // 首帧兜底:探测不到分辨率时,用首帧尺寸初始化并分配缓存
  369. if (m_imageWidth <= 0 || m_imageHeight <= 0) {
  370. m_imageWidth = frame->width;
  371. m_imageHeight = frame->height;
  372. int bufSize = av_image_get_buffer_size(m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
  373. m_buffer = (uint8_t*) av_realloc(m_buffer, bufSize * sizeof(uint8_t));
  374. av_image_fill_arrays(m_pixels, m_pitch, m_buffer, m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
  375. }
  376. // 判断是否需要像素格式/分辨率转换
  377. bool needConvert =
  378. (frame->format != m_dstPixFmt) ||
  379. (frame->width != m_imageWidth) ||
  380. (frame->height != m_imageHeight);
  381. if (needConvert) {
  382. m_swsCtx = sws_getCachedContext(m_swsCtx,
  383. frame->width,
  384. frame->height,
  385. (enum AVPixelFormat) frame->format,
  386. m_imageWidth,
  387. m_imageHeight,
  388. m_dstPixFmt,
  389. m_swsFlags,
  390. nullptr,
  391. nullptr,
  392. nullptr);
  393. if (m_swsCtx) {
  394. // 确保输出缓存已按当前目标尺寸分配
  395. int bufSize = av_image_get_buffer_size(m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
  396. m_buffer = (uint8_t*) av_realloc(m_buffer, bufSize * sizeof(uint8_t));
  397. av_image_fill_arrays(m_pixels, m_pitch, m_buffer, m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
  398. sws_scale(m_swsCtx, frame->data, frame->linesize, 0, frame->height, m_pixels, m_pitch);
  399. uint8_t* planes[4] = { m_pixels[0], m_pixels[1], m_pixels[2], m_pixels[3] };
  400. int lines[4] = { m_pitch[0], m_pitch[1], m_pitch[2], m_pitch[3] };
  401. emit frameChanged(QSharedPointer<VideoFrame>::create(m_dstPixFmt,
  402. m_imageWidth,
  403. m_imageHeight,
  404. planes,
  405. lines));
  406. } else {
  407. // 回退:直接透传
  408. emit frameChanged(QSharedPointer<VideoFrame>::create((AVPixelFormat) frame->format,
  409. frame->width,
  410. frame->height,
  411. frame->data,
  412. frame->linesize));
  413. }
  414. } else {
  415. // 无需转换,直接透传
  416. emit frameChanged(QSharedPointer<VideoFrame>::create((AVPixelFormat) frame->format,
  417. frame->width,
  418. frame->height,
  419. frame->data,
  420. frame->linesize));
  421. }
  422. //记录视频时钟
  423. m_videoClock.setClock(frame->pts * av_q2d(m_fmtCtx->streams[m_videoIndex]->time_base));
  424. }
  425. }
// Frame-pacing loop executed on a thread-pool worker: pops decoded video
// frames, times their presentation against the master clock (audio when
// present) and hands them to displayImage().
// @param par unused thread-pool payload.
void AVPlayer::videoCallback(std::shared_ptr<void> par)
{
double time = 0.00;
double duration = 0.00;
double delay = 0.00;
if (m_clockInitFlag == -1) {
initAVClock();
}
do {
if (m_exit)
break;
if (m_pause) {
std::this_thread::sleep_for(std::chrono::milliseconds(50));
continue;
}
if (m_decoder->getRemainingVFrame()) {
MyFrame* lastFrame = m_decoder->peekLastVFrame();
MyFrame* frame = m_decoder->peekVFrame();
//qDebug()<<"video pts:"<<frame->pts;
// Frame belongs to a stale packet serial (e.g. after a seek): drop it.
if (frame->serial != m_decoder->vidPktSerial()) {
m_decoder->setNextVFrame();
continue;
}
// Serial changed between consecutive frames: restart the frame timer.
if (frame->serial != lastFrame->serial)
m_frameTimer = av_gettime_relative() / 1000000.0;
duration = vpDuration(lastFrame, frame);
delay = computeTargetDelay(duration);
time = av_gettime_relative() / 1000000.0;
//qDebug()<<"delay:"<<delay<<endl;
// Display time not reached yet: sleep (bounded) and re-evaluate.
if (time < m_frameTimer + delay) {
QThread::msleep(
(uint32_t) (FFMIN(LowLatencyConfig::BALANCED_SYNC_REJUDGE_THRESHOLD, m_frameTimer + delay - time) * 1000));
continue;
}
m_frameTimer += delay;
// Fallen too far behind: resynchronise the timer to "now".
if (time - m_frameTimer > LowLatencyConfig::BALANCED_SYNC_THRESHOLD_MAX)
m_frameTimer = time;
// Drop-frame logic runs only when more than one undisplayed frame is
// queued; skipped during speed-changed and frame-stepped playback,
// where dropping too much makes the UI stutter.
// Balanced: a gentle drop strategy favouring stability.
if (m_playSpeed == 1.0 && m_decoder->getRemainingVFrame() > 1) { // ">1" is the stabler choice
MyFrame* nextFrame = m_decoder->peekNextVFrame();
if (nextFrame) {
duration = nextFrame->pts - frame->pts;
// Master clock already past this frame's theoretical display
// window: discard the current frame immediately.
// Balanced: use the raw duration threshold to avoid over-dropping.
if (time > m_frameTimer + duration) {
m_decoder->setNextVFrame();
qDebug() << "abandon vframe (balanced mode)" << Qt::endl;
continue;
}
}
// Gentle delay-based dropping using the configured threshold.
if (delay > LowLatencyConfig::FRAME_DROP_THRESHOLD) {
m_decoder->setNextVFrame();
qDebug() << "drop frame due to high delay:" << delay << Qt::endl;
continue;
}
}
displayImage(&frame->frame);
// Without audio, drive the public progress signal from the video clock.
if (!m_hasAudio) {
uint32_t _pts = (uint32_t) m_videoClock.getClock();
if (m_lastAudPts != _pts) {
emit AVPtsChanged(_pts);
m_lastAudPts = _pts;
}
}
// Advance the queue's read index.
m_decoder->setNextVFrame();
} else {
QThread::msleep(10);
}
} while (true);
//qDebug()<<"videoCallBack exit"<<endl;
if (m_decoder->isExit()) {
emit AVTerminate();
}
}
  506. double AVPlayer::computeTargetDelay(double delay)
  507. {
  508. // 无音频流时,不进行音视频同步,直接按视频节奏播放
  509. if (!m_hasAudio)
  510. return delay;
  511. //视频当前显示帧与当前播放音频帧时间戳差值
  512. double diff = m_videoClock.getClock() - m_audioClock.getClock();
  513. //计算同步阈值
  514. double sync = FFMAX(LowLatencyConfig::BALANCED_SYNC_THRESHOLD_MIN, FFMIN(LowLatencyConfig::BALANCED_SYNC_THRESHOLD_MAX, delay));
  515. //不同步时间超过阈值直接放弃同步
  516. if (!isnan(diff) && fabs(diff) < LowLatencyConfig::BALANCED_NOSYNC_THRESHOLD) {
  517. if (diff
  518. <= -sync) { //视频已落后音频大于一帧的显示时长,delay值应为0,立马将当前帧显示追赶音频
  519. delay = FFMAX(0, diff + delay);
  520. } else if (diff >= sync
  521. && delay
  522. > LowLatencyConfig::BALANCED_SYNC_FRAMEDUP_THRESHOLD) { //视频超前音频大于一个视频帧的时间,延时一个视频帧时间+已超时时间,下次判定将至少被延时到下个将要显示的视频帧pts
  523. delay = diff + delay;
  524. } else if (diff >= sync) { //高帧率视频直接延时两个视频帧时间;;;;
  525. delay = 2 * delay;
  526. }
  527. }
  528. return delay;
  529. }
  530. double AVPlayer::vpDuration(MyFrame* lastFrame, MyFrame* curFrame)
  531. {
  532. if (curFrame->serial == lastFrame->serial) {
  533. double duration = curFrame->pts - lastFrame->pts;
  534. if (isnan(duration) || duration > LowLatencyConfig::BALANCED_NOSYNC_THRESHOLD)
  535. return lastFrame->duration;
  536. else
  537. return duration;
  538. } else {
  539. return 0.00;
  540. }
  541. }