// av_player.cpp
  1. #include "av_player.h"
  2. #include <QDebug>
  3. #include <QImage>
  4. #include <QThread>
  5. #include "threadpool.h"
  6. #include "vframe.h"
  7. #include "low_latency_config.h"
// Sync threshold definitions have been moved to the config file (low_latency_config.h).
// Upper bound on a single video frame's duration, used to adapt sync for
// low-frame-rate sources: when the frame rate is very low, doubling the delay
// for an early video frame is inappropriate and needs special handling; the
// cap here corresponds to 10 frames per second.
// Upper bound past which attempting synchronization is abandoned as meaningless.
  13. AVPlayer::AVPlayer()
  14. : m_decoder(new Decoder)
  15. , m_fmtCtx(nullptr)
  16. , m_audioFrame(av_frame_alloc())
  17. , m_imageWidth(300)
  18. , m_imageHeight(300)
  19. , m_swrCtx(nullptr)
  20. , m_swsCtx(nullptr)
  21. , m_buffer(nullptr)
  22. , m_audioBuf(nullptr)
  23. , m_duration(0)
  24. , m_volume(30)
  25. , m_exit(0)
  26. , m_pause(0)
  27. , m_playSpeed(1.0)
  28. , m_baseTimeUs(0)
  29. , m_performanceFrameCount(0)
  30. , m_lastDelayValue(0.0)
  31. {
  32. m_sonicStream = nullptr;
  33. // 初始化高精度时间基准
  34. m_baseTimeUs = av_gettime_relative();
  35. }
  36. AVPlayer::~AVPlayer()
  37. {
  38. av_frame_free(&m_audioFrame);
  39. clearPlayer();
  40. delete m_decoder;
  41. if (m_swrCtx)
  42. swr_free(&m_swrCtx);
  43. if (m_swsCtx)
  44. sws_freeContext(m_swsCtx);
  45. if (m_audioBuf)
  46. av_free(m_audioBuf);
  47. if (m_buffer)
  48. av_free(m_buffer);
  49. }
  50. int AVPlayer::play(const QString& url)
  51. {
  52. clearPlayer();
  53. if (!m_decoder->decode(url)) {
  54. qDebug() << "decode failed";
  55. return 0;
  56. }
  57. //解码成功可获取流时长
  58. m_duration = m_decoder->avDuration();
  59. emit AVDurationChanged(m_duration);
  60. m_pause = 0;
  61. m_clockInitFlag = -1;
  62. // 播放开始前重置高精度时间基准与PTS归一化基线
  63. m_baseTimeUs = av_gettime_relative();
  64. m_frameTimerUs = 0;
  65. m_audioStartPtsUs = -1;
  66. m_videoStartPtsUs = -1;
  67. // 判断是否存在音/视频流
  68. m_audioIndex = m_decoder->audioIndex();
  69. m_videoIndex = m_decoder->videoIndex();
  70. m_hasAudio = (m_audioIndex >= 0);
  71. m_hasVideo = (m_videoIndex >= 0);
  72. bool ok = false;
  73. if (m_hasAudio) {
  74. if (initSDL()) {
  75. ok = true;
  76. } else {
  77. qDebug() << "init sdl failed!";
  78. }
  79. }
  80. if (m_hasVideo) {
  81. if (initVideo()) {
  82. ok = true;
  83. } else {
  84. qDebug() << "init video failed!";
  85. }
  86. }
  87. // 仅音频时,主动初始化时钟
  88. if (!m_hasVideo && m_hasAudio) {
  89. initAVClock();
  90. }
  91. return ok ? 1 : 0;
  92. }
/**
 * SDL audio callback — runs on SDL's audio thread.
 * Drains decoded audio frames from the decoder, converts them to the SDL
 * target format via libswresample when needed, optionally time-stretches the
 * samples through sonic when the playback speed is not 1.0, and mixes the
 * bytes into `stream`. After filling the device buffer it advances the audio
 * clock (normalized microseconds) and emits AVPtsChanged on whole-second
 * position changes.
 */
void fillAStreamCallback(void* userdata, uint8_t* stream, int len)
{
memset(stream, 0, len);
AVPlayer* is = (AVPlayer*) userdata;
// NOTE(review): function-local static — shared across ALL AVPlayer
// instances and never reset between playbacks; assumes a single active
// player. Consider making this a member — TODO confirm.
static double audioPts = 0.00;
while (len > 0) {
if (is->m_exit)
return;
if (is->m_audioBufIndex >= is->m_audioBufSize) { /* buffer fully consumed: refill it */
int ret = is->m_decoder->getAFrame(is->m_audioFrame);
if (ret) {
is->m_audioBufIndex = 0;
// Lazily create the resampler the first time a decoded frame's
// format/layout/rate differs from the SDL target format.
if ((is->m_targetSampleFmt != is->m_audioFrame->format
|| is->m_targetChannelLayout != (int64_t)ffmpeg_get_frame_channel_layout(is->m_audioFrame)
|| is->m_targetFreq != is->m_audioFrame->sample_rate
|| is->m_targetNbSamples != is->m_audioFrame->nb_samples)
&& !is->m_swrCtx) {
is->m_swrCtx = ffmpeg_swr_alloc_set_opts(nullptr,
is->m_targetChannelLayout,
is->m_targetSampleFmt,
is->m_targetFreq,
(int64_t)ffmpeg_get_frame_channel_layout(is->m_audioFrame),
(enum AVSampleFormat) is->m_audioFrame->format,
is->m_audioFrame->sample_rate,
0,
nullptr);
if (!is->m_swrCtx || swr_init(is->m_swrCtx) < 0) {
qDebug() << "swr_init failed";
return;
}
}
if (is->m_swrCtx) {
const uint8_t** in = (const uint8_t**) is->m_audioFrame->extended_data;
// Estimated output sample count with +256 headroom for resampler delay.
int out_count = (uint64_t) is->m_audioFrame->nb_samples * is->m_targetFreq
/ is->m_audioFrame->sample_rate
+ 256;
int out_size = ffmpeg_get_buffer_size(is->m_targetSampleFmt,
is->m_targetChannels,
out_count,
0);
if (out_size < 0) {
qDebug() << "av_samples_get_buffer_size failed";
return;
}
// av_fast_malloc only reallocates when out_size exceeds the current capacity.
av_fast_malloc(&is->m_audioBuf, &is->m_audioBufSize, out_size);
if (!is->m_audioBuf) {
qDebug() << "av_fast_malloc failed";
return;
}
int len2 = swr_convert(is->m_swrCtx,
&is->m_audioBuf,
out_count,
in,
is->m_audioFrame->nb_samples);
if (len2 < 0) {
qDebug() << "swr_convert failed";
return;
}
// Non-unit speed: route the converted samples through sonic.
if (is->m_playSpeed != 1.0) {
sonicSetSpeed(is->m_sonicStream, is->m_playSpeed);
int ret = sonicWriteShortToStream(is->m_sonicStream,
(short*) is->m_audioBuf,
len2);
int availSamples = sonicSamplesAvailable(is->m_sonicStream);
if (!availSamples) {
// Sonic has buffered the input but produced nothing yet:
// mark our buffer drained and fetch the next frame.
is->m_audioBufSize = is->m_audioBufIndex;
continue;
}
int numSamples = availSamples;
int bytes = numSamples * is->m_targetChannels
* av_get_bytes_per_sample(is->m_targetSampleFmt);
// Grow the buffer if sonic produced more bytes than swr did.
if (bytes > out_size) {
av_fast_malloc(&is->m_audioBuf, &is->m_audioBufSize, bytes);
}
len2 = sonicReadShortFromStream(is->m_sonicStream,
(short*) is->m_audioBuf,
numSamples);
}
is->m_audioBufSize = len2 * is->m_targetChannels
* av_get_bytes_per_sample(is->m_targetSampleFmt);
} else {
// Formats already match: copy the frame's samples straight through.
is->m_audioBufSize = ffmpeg_get_buffer_size(is->m_targetSampleFmt,
is->m_targetChannels,
is->m_audioFrame->nb_samples,
0);
av_fast_malloc(&is->m_audioBuf, &is->m_audioBufSize, is->m_audioBufSize + 256);
if (!is->m_audioBuf) {
qDebug() << "av_fast_malloc failed";
return;
}
memcpy(is->m_audioBuf, is->m_audioFrame->data[0], is->m_audioBufSize);
}
// Remember this frame's presentation time (seconds) for the clock update below.
audioPts = is->m_audioFrame->pts
* av_q2d(is->m_fmtCtx->streams[is->m_audioIndex]->time_base);
//qDebug()<<is->m_audioPts;
av_frame_unref(is->m_audioFrame);
} else {
// No frame available: check whether playback truly reached end of file.
if (is->m_decoder->isExit()) {
emit is->AVTerminate();
}
return;
}
}
// Mix as many pending bytes as the device still needs.
int len1 = is->m_audioBufSize - is->m_audioBufIndex;
len1 = (len1 > len ? len : len1);
SDL_MixAudio(stream, is->m_audioBuf + is->m_audioBufIndex, len1, is->m_volume);
len -= len1;
is->m_audioBufIndex += len1;
stream += len1;
}
// Record the audio clock: convert to a microsecond timestamp normalized so
// the first frame's PTS maps to 0.
int64_t audioPtsUs = static_cast<int64_t>(audioPts * 1000000.0);
if (is->m_audioStartPtsUs < 0) {
is->m_audioStartPtsUs = audioPtsUs;
}
int64_t normAudioPtsUs = audioPtsUs - is->m_audioStartPtsUs;
is->m_audioClock.setClock(normAudioPtsUs);
// Emit the position-changed signal (whole seconds), only on change.
uint32_t _pts = (uint32_t) audioPts;
if (is->m_lastAudPts != _pts) {
emit is->AVPtsChanged(_pts);
is->m_lastAudPts = _pts;
}
}
  218. int AVPlayer::initSDL()
  219. {
  220. // 无音频流直接返回失败,调用方根据 m_hasAudio 控制
  221. if (m_decoder->audioIndex() < 0)
  222. return 0;
  223. // 性能优化:使用更快的SDL初始化方式
  224. if (SDL_WasInit(SDL_INIT_AUDIO) == 0) {
  225. if (SDL_Init(SDL_INIT_AUDIO) != 0) {
  226. qDebug() << "SDL_Init failed";
  227. return 0;
  228. }
  229. }
  230. m_exit = 0;
  231. m_audioBufSize = 0;
  232. m_audioBufIndex = 0;
  233. m_lastAudPts = -1;
  234. m_audioCodecPar = m_decoder->audioCodecPar();
  235. m_audioIndex = m_decoder->audioIndex();
  236. m_fmtCtx = m_decoder->formatContext();
  237. // 性能优化:使用更小的音频缓冲区减少延迟
  238. SDL_AudioSpec wanted_spec;
  239. wanted_spec.channels = ffmpeg_get_codec_channels(m_fmtCtx->streams[m_audioIndex]);
  240. wanted_spec.freq = m_audioCodecPar->sample_rate;
  241. wanted_spec.format = AUDIO_S16SYS;
  242. wanted_spec.silence = 0;
  243. wanted_spec.callback = fillAStreamCallback;
  244. wanted_spec.userdata = this;
  245. // 使用配置文件中的音频样本数减少延迟
  246. wanted_spec.samples = LowLatencyConfig::MIN_AUDIO_SAMPLES; // 使用配置文件中的最小音频样本数
  247. if (SDL_OpenAudio(&wanted_spec, nullptr) < 0) {
  248. qDebug() << "SDL_OpenAudio failed";
  249. return 0;
  250. }
  251. m_targetSampleFmt = AV_SAMPLE_FMT_S16;
  252. m_targetChannels = ffmpeg_get_codec_channels(m_fmtCtx->streams[m_audioIndex]);
  253. m_targetFreq = m_audioCodecPar->sample_rate;
  254. m_targetChannelLayout = (int64_t)ffmpeg_get_default_channel_layout(m_targetChannels);
  255. m_targetNbSamples = m_audioCodecPar->frame_size;
  256. m_audioIndex = m_decoder->audioIndex();
  257. m_fmtCtx = m_decoder->formatContext();
  258. m_sonicStream = sonicCreateStream(m_targetFreq, m_targetChannels);
  259. sonicSetQuality(m_sonicStream, 1);
  260. SDL_PauseAudio(0);
  261. return 1;
  262. }
  263. int AVPlayer::initVideo()
  264. {
  265. m_frameTimerUs = 0; // 使用高精度微秒时间戳
  266. m_videoCodecPar = m_decoder->videoCodecPar();
  267. m_videoIndex = m_decoder->videoIndex();
  268. m_fmtCtx = m_decoder->formatContext();
  269. m_imageWidth = m_videoCodecPar ? m_videoCodecPar->width : 0;
  270. m_imageHeight = m_videoCodecPar ? m_videoCodecPar->height : 0;
  271. m_dstPixFmt = AV_PIX_FMT_YUV422P;
  272. // 改用快速缩放算法以降低转换时延
  273. m_swsFlags = SWS_FAST_BILINEAR;
  274. // 仅当分辨率有效时分配缓存,否则延迟到首帧分配
  275. if (m_imageWidth > 0 && m_imageHeight > 0) {
  276. int bufSize = av_image_get_buffer_size(m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
  277. m_buffer = (uint8_t*) av_realloc(m_buffer, bufSize * sizeof(uint8_t));
  278. av_image_fill_arrays(m_pixels, m_pitch, m_buffer, m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
  279. }
  280. //视频帧播放回调递插入线程池任务队列
  281. if (!ThreadPool::addTask(std::bind(&AVPlayer::videoCallback, this, std::placeholders::_1),
  282. std::make_shared<int>(0))) {
  283. qDebug() << "videoCallback add task failed!";
  284. }
  285. return 1;
  286. }
/**
 * Pause or resume playback.
 * With audio: drives the SDL audio device state and snapshots/re-anchors the
 * clocks so the reported progress does not jump across the pause.
 * Video-only: toggles m_pause, which the videoCallback() loop polls.
 * @param isPause true to pause, false to resume.
 */
void AVPlayer::pause(bool isPause)
{
if (m_hasAudio) {
if (SDL_GetAudioStatus() == SDL_AUDIO_STOPPED)
return;
if (isPause) {
if (SDL_GetAudioStatus() == SDL_AUDIO_PLAYING) {
SDL_PauseAudio(1);
// Record when the pause started (relative to the time base) plus a
// snapshot of every clock.
int64_t pauseTimeUs = av_gettime_relative();
m_pauseTimeUs = pauseTimeUs - (m_baseTimeUs ? m_baseTimeUs : pauseTimeUs);
m_pauseAudClockUs = m_audioClock.getClock();
m_pauseVidClockUs = m_videoClock.getClock();
m_pauseExtClockUs = m_extClock.getClock();
m_pause = 1;
}
} else {
if (SDL_GetAudioStatus() == SDL_AUDIO_PAUSED) {
SDL_PauseAudio(0);
// Resume: exclude the paused wall-clock span from the frame timer and
// re-anchor each clock to its pre-pause snapshot to avoid progress jumps.
int64_t resumeTimeUs = av_gettime_relative();
int64_t resumeElapsedUs = resumeTimeUs - (m_baseTimeUs ? m_baseTimeUs : resumeTimeUs);
m_frameTimerUs += resumeElapsedUs - m_pauseTimeUs;
// Re-anchor all clocks (microseconds), excluding paused wall-clock time.
m_audioClock.setClock(m_pauseAudClockUs);
m_videoClock.setClock(m_pauseVidClockUs);
m_extClock.setClock(m_pauseExtClockUs);
m_pause = 0;
}
}
} else if (m_hasVideo) {
// Video only: control the playback thread through the pause flag.
if (isPause) {
if (!m_pause) {
int64_t pauseTimeUs = av_gettime_relative();
m_pauseTimeUs = pauseTimeUs - (m_baseTimeUs ? m_baseTimeUs : pauseTimeUs);
// Snapshot the clocks that matter without audio.
m_pauseVidClockUs = m_videoClock.getClock();
m_pauseExtClockUs = m_extClock.getClock();
m_pause = 1;
}
} else {
if (m_pause) {
int64_t resumeTimeUs = av_gettime_relative();
int64_t resumeElapsedUs = resumeTimeUs - (m_baseTimeUs ? m_baseTimeUs : resumeTimeUs);
m_frameTimerUs += resumeElapsedUs - m_pauseTimeUs;
// Re-anchor the video and external clocks.
m_videoClock.setClock(m_pauseVidClockUs);
m_extClock.setClock(m_pauseExtClockUs);
m_pause = 0;
}
}
}
}
  341. void AVPlayer::clearPlayer()
  342. {
  343. if (playState() != AV_STOPPED) {
  344. m_exit = 1;
  345. if (m_hasAudio && playState() == AV_PLAYING)
  346. SDL_PauseAudio(1);
  347. m_decoder->exit();
  348. if (m_hasAudio)
  349. SDL_CloseAudio();
  350. if (m_swrCtx)
  351. swr_free(&m_swrCtx);
  352. if (m_swsCtx)
  353. sws_freeContext(m_swsCtx);
  354. m_swrCtx = nullptr;
  355. m_swsCtx = nullptr;
  356. if (m_sonicStream)
  357. sonicDestroyStream(m_sonicStream);
  358. m_sonicStream = nullptr;
  359. }
  360. }
/**
 * Probe the media at `url` and return its metadata.
 * Thin forwarding wrapper around the decoder; ownership/lifetime of the
 * returned MediaInfo is defined by Decoder::detectMediaInfo — confirm the
 * caller's responsibility there.
 */
AVTool::MediaInfo* AVPlayer::detectMediaInfo(const QString& url)
{
return m_decoder->detectMediaInfo(url);
}
  365. AVPlayer::PlayState AVPlayer::playState()
  366. {
  367. if (m_hasAudio) {
  368. AVPlayer::PlayState state;
  369. switch (SDL_GetAudioStatus()) {
  370. case SDL_AUDIO_PLAYING:
  371. state = AVPlayer::AV_PLAYING;
  372. break;
  373. case SDL_AUDIO_PAUSED:
  374. state = AVPlayer::AV_PAUSED;
  375. break;
  376. case SDL_AUDIO_STOPPED:
  377. state = AVPlayer::AV_STOPPED;
  378. break;
  379. default:
  380. state = AVPlayer::AV_STOPPED;
  381. break;
  382. }
  383. return state;
  384. }
  385. if (m_hasVideo) {
  386. if (m_exit)
  387. return AV_STOPPED;
  388. if (m_pause)
  389. return AV_PAUSED;
  390. return AV_PLAYING;
  391. }
  392. return AV_STOPPED;
  393. }
  394. void AVPlayer::initAVClock()
  395. {
  396. m_audioClock.setClock(0);
  397. m_videoClock.setClock(0);
  398. // 新增:初始化外部时钟为0,对齐ffplay -sync ext 行为
  399. m_extClock.setClock(0);
  400. // 新增:重置PTS基线,确保每次开始都从0归一化
  401. m_audioStartPtsUs = -1;
  402. m_videoStartPtsUs = -1;
  403. m_clockInitFlag = 1;
  404. }
  405. void AVPlayer::displayImage(AVFrame* frame)
  406. {
  407. if (frame) {
  408. // 首帧兜底:探测不到分辨率时,用首帧尺寸初始化并分配缓存
  409. if (m_imageWidth <= 0 || m_imageHeight <= 0) {
  410. m_imageWidth = frame->width;
  411. m_imageHeight = frame->height;
  412. int bufSize = av_image_get_buffer_size(m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
  413. m_buffer = (uint8_t*) av_realloc(m_buffer, bufSize * sizeof(uint8_t));
  414. av_image_fill_arrays(m_pixels, m_pitch, m_buffer, m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
  415. }
  416. // 判断是否需要像素格式/分辨率转换
  417. bool needConvert =
  418. (frame->format != m_dstPixFmt) ||
  419. (frame->width != m_imageWidth) ||
  420. (frame->height != m_imageHeight);
  421. if (needConvert) {
  422. m_swsCtx = sws_getCachedContext(m_swsCtx,
  423. frame->width,
  424. frame->height,
  425. (enum AVPixelFormat) frame->format,
  426. m_imageWidth,
  427. m_imageHeight,
  428. m_dstPixFmt,
  429. m_swsFlags,
  430. nullptr,
  431. nullptr,
  432. nullptr);
  433. if (m_swsCtx) {
  434. // 确保输出缓存已按当前目标尺寸分配
  435. int bufSize = av_image_get_buffer_size(m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
  436. m_buffer = (uint8_t*) av_realloc(m_buffer, bufSize * sizeof(uint8_t));
  437. av_image_fill_arrays(m_pixels, m_pitch, m_buffer, m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
  438. sws_scale(m_swsCtx, frame->data, frame->linesize, 0, frame->height, m_pixels, m_pitch);
  439. uint8_t* planes[4] = { m_pixels[0], m_pixels[1], m_pixels[2], m_pixels[3] };
  440. int lines[4] = { m_pitch[0], m_pitch[1], m_pitch[2], m_pitch[3] };
  441. emit frameChanged(QSharedPointer<VideoFrame>::create(m_dstPixFmt,
  442. m_imageWidth,
  443. m_imageHeight,
  444. planes,
  445. lines));
  446. } else {
  447. // 回退:直接透传
  448. emit frameChanged(QSharedPointer<VideoFrame>::create((AVPixelFormat) frame->format,
  449. frame->width,
  450. frame->height,
  451. frame->data,
  452. frame->linesize));
  453. }
  454. } else {
  455. // 无需转换,直接透传
  456. emit frameChanged(QSharedPointer<VideoFrame>::create((AVPixelFormat) frame->format,
  457. frame->width,
  458. frame->height,
  459. frame->data,
  460. frame->linesize));
  461. }
  462. //记录视频时钟,转换为微秒时间戳并做归一化(首帧为0)
  463. double videoPtsSeconds = frame->pts * av_q2d(m_fmtCtx->streams[m_videoIndex]->time_base);
  464. int64_t videoPtsUs = static_cast<int64_t>(videoPtsSeconds * 1000000.0);
  465. if (m_videoStartPtsUs < 0) {
  466. m_videoStartPtsUs = videoPtsUs;
  467. }
  468. int64_t normVideoPtsUs = videoPtsUs - m_videoStartPtsUs;
  469. m_videoClock.setClock(normVideoPtsUs);
  470. }
  471. }
  472. void AVPlayer::videoCallback(std::shared_ptr<void> par)
  473. {
  474. double time = 0.00;
  475. double duration = 0.00;
  476. double delay = 0.00;
  477. if (m_clockInitFlag == -1) {
  478. initAVClock();
  479. }
  480. do {
  481. if (m_exit)
  482. break;
  483. if (m_pause) {
  484. std::this_thread::sleep_for(std::chrono::milliseconds(50));
  485. continue;
  486. }
  487. if (m_decoder->getRemainingVFrame()) {
  488. MyFrame* lastFrame = m_decoder->peekLastVFrame();
  489. MyFrame* frame = m_decoder->peekVFrame();
  490. //qDebug()<<"video pts:"<<frame->pts;
  491. if (frame->serial != m_decoder->vidPktSerial()) {
  492. m_decoder->setNextVFrame();
  493. continue;
  494. }
  495. if (frame->serial != lastFrame->serial) {
  496. // 优化:直接使用高精度时间戳重置帧定时器,避免除法运算
  497. int64_t currentTimeUs = av_gettime_relative();
  498. m_frameTimerUs = currentTimeUs - m_baseTimeUs;
  499. }
  500. duration = vpDuration(lastFrame, frame);
  501. delay = computeTargetDelay(duration);
  502. // 优化:直接使用高精度时间戳计算当前时间,避免除法运算
  503. int64_t currentTimeUs = av_gettime_relative();
  504. int64_t timeUs = currentTimeUs - m_baseTimeUs;
  505. // 性能监控:检测延迟累积
  506. m_performanceFrameCount++;
  507. if (m_performanceFrameCount % LowLatencyConfig::DELAY_MONITOR_INTERVAL == 0) {
  508. if (delay > m_lastDelayValue + LowLatencyConfig::DELAY_ACCUMULATION_THRESHOLD) {
  509. // 检测到延迟累积,进行校正
  510. delay *= LowLatencyConfig::DELAY_RESET_FACTOR;
  511. qDebug() << "Delay accumulation detected, correcting delay from" << m_lastDelayValue << "to" << delay;
  512. }
  513. m_lastDelayValue = delay;
  514. }
  515. //qDebug()<<"delay:"<<delay<<endl;
  516. //显示时长未到
  517. int64_t delayUs = static_cast<int64_t>(delay * 1000000.0);
  518. if (timeUs < m_frameTimerUs + delayUs) {
  519. // 优化:使用更精确的睡眠时间计算
  520. int64_t sleepTimeUs = m_frameTimerUs + delayUs - timeUs;
  521. int64_t maxSleepUs = LowLatencyConfig::BALANCED_SYNC_REJUDGE_THRESHOLD_US;
  522. if (sleepTimeUs > maxSleepUs) {
  523. sleepTimeUs = maxSleepUs;
  524. }
  525. QThread::msleep(static_cast<uint32_t>(sleepTimeUs / 1000));
  526. continue;
  527. }
  528. m_frameTimerUs += delayUs;
  529. int64_t maxThresholdUs = LowLatencyConfig::BALANCED_SYNC_THRESHOLD_MAX_US;
  530. if (timeUs - m_frameTimerUs > maxThresholdUs) {
  531. m_frameTimerUs = timeUs;
  532. // 帧定时器校正时重置性能计数器
  533. int64_t correctionThresholdUs = LowLatencyConfig::FRAME_TIMER_CORRECTION_THRESHOLD_US;
  534. if (timeUs - m_frameTimerUs > correctionThresholdUs) {
  535. m_performanceFrameCount = 0;
  536. }
  537. }
  538. //队列中未显示帧一帧以上执行逻辑丢帧判断,倍速播放和逐帧播放
  539. //都不跑进此逻辑,倍速易造成丢帧过多导致界面不流畅
  540. // 平衡:温和的丢帧策略,保证稳定性
  541. if (m_playSpeed == 1.0 && m_decoder->getRemainingVFrame() > 1) { // 恢复为>1,更稳定
  542. MyFrame* nextFrame = m_decoder->peekNextVFrame();
  543. if (nextFrame) {
  544. duration = nextFrame->pts - frame->pts;
  545. //若主时钟超前到大于当前帧理论显示应持续的时间了,则当前帧立即丢弃
  546. // 平衡:使用原始duration阈值,避免过度丢帧
  547. int64_t durationUs = static_cast<int64_t>(duration * 1000000.0);
  548. if (timeUs > m_frameTimerUs + durationUs) {
  549. m_decoder->setNextVFrame();
  550. qDebug() << "abandon vframe (balanced mode)" << Qt::endl;
  551. continue;
  552. }
  553. }
  554. // 温和:基于延迟的丢帧阈值使用配置文件参数
  555. if (delay > LowLatencyConfig::FRAME_DROP_THRESHOLD) { // 使用配置文件中的丢帧阈值
  556. m_decoder->setNextVFrame();
  557. qDebug() << "drop frame due to high delay:" << delay << Qt::endl;
  558. continue;
  559. }
  560. }
  561. displayImage(&frame->frame);
  562. // 无音频时,基于视频时钟更新对外进度
  563. if (!m_hasAudio) {
  564. uint32_t _pts = (uint32_t) (m_videoClock.getClock() / 1000000);
  565. if (m_lastAudPts != _pts) {
  566. emit AVPtsChanged(_pts);
  567. m_lastAudPts = _pts;
  568. }
  569. }
  570. //读索引后移
  571. m_decoder->setNextVFrame();
  572. } else {
  573. QThread::msleep(10);
  574. }
  575. } while (true);
  576. //qDebug()<<"videoCallBack exit"<<endl;
  577. if (m_decoder->isExit()) {
  578. emit AVTerminate();
  579. }
  580. }
  581. double AVPlayer::computeTargetDelay(double delay)
  582. {
  583. // 当选择视频为主时钟时,直接按视频节奏播放(不做同步调整)
  584. if (m_syncType == SYNC_VIDEO) {
  585. return delay;
  586. }
  587. // 统一使用微秒单位进行计算
  588. const int64_t videoUs = m_videoClock.getClock();
  589. // 选择主时钟:音频/外部(二选一,若缺失则回退)
  590. int64_t masterUs = 0;
  591. if (m_syncType == SYNC_AUDIO) {
  592. if (m_hasAudio) {
  593. masterUs = m_audioClock.getClock();
  594. } else {
  595. // 无音频时回退到外部时钟
  596. masterUs = m_extClock.getClock();
  597. }
  598. } else {
  599. // SYNC_EXTERNAL 或其他情况,统一用外部时钟
  600. masterUs = m_extClock.getClock();
  601. }
  602. const int64_t diffUs = videoUs - masterUs; // 正值:视频超前主时钟;负值:视频落后
  603. // 将单帧延时(秒)转为微秒,并在最小/最大阈值之间夹紧
  604. int64_t delayUs = static_cast<int64_t>(delay * 1000000.0);
  605. int64_t syncUs = FFMAX(LowLatencyConfig::BALANCED_SYNC_THRESHOLD_MIN_US,
  606. FFMIN(LowLatencyConfig::BALANCED_SYNC_THRESHOLD_MAX_US, delayUs));
  607. // 放弃同步的阈值(2s)改为微秒单位进行判断
  608. const int64_t noSyncUs = static_cast<int64_t>(LowLatencyConfig::BALANCED_NOSYNC_THRESHOLD * 1000000.0);
  609. const int64_t frameDupUs = static_cast<int64_t>(LowLatencyConfig::BALANCED_SYNC_FRAMEDUP_THRESHOLD * 1000000.0);
  610. if (qAbs(diffUs) < noSyncUs) {
  611. if (diffUs <= -syncUs) {
  612. // 视频落后主时钟:尽快追赶
  613. const int64_t newDelayUs = FFMAX(0LL, diffUs + delayUs);
  614. delay = static_cast<double>(newDelayUs) / 1000000.0;
  615. } else if (diffUs >= syncUs && delayUs > frameDupUs) {
  616. // 视频超前主时钟且帧时长较长:延时一个帧时长 + 超前量
  617. const int64_t newDelayUs = diffUs + delayUs;
  618. delay = static_cast<double>(newDelayUs) / 1000000.0;
  619. } else if (diffUs >= syncUs) {
  620. // 高帧率场景:延时两个视频帧时长
  621. delay = 2.0 * delay;
  622. }
  623. }
  624. return delay;
  625. }
  626. double AVPlayer::vpDuration(MyFrame* lastFrame, MyFrame* curFrame)
  627. {
  628. if (curFrame->serial == lastFrame->serial) {
  629. double duration = curFrame->pts - lastFrame->pts;
  630. if (isnan(duration) || duration > LowLatencyConfig::BALANCED_NOSYNC_THRESHOLD)
  631. return lastFrame->duration;
  632. else
  633. return duration;
  634. } else {
  635. return 0.00;
  636. }
  637. }