// av_player.cpp
  1. #include "av_player.h"
  2. #include <QDebug>
  3. #include <QImage>
  4. #include <QThread>
  5. #include "threadpool.h"
  6. #include "vframe.h"
  7. //同步阈值下限
  8. #define AV_SYNC_THRESHOLD_MIN 0.04
  9. //同步阈值上限
  10. #define AV_SYNC_THRESHOLD_MAX 0.1
  11. //单帧视频时长阈值上限,用于适配低帧时同步,
  12. //帧率过低视频帧超前不适合翻倍延迟,应特殊
  13. //处理,这里设置上限一秒10帧
  14. #define AV_SYNC_FRAMEDUP_THRESHOLD 0.1
  15. //同步操作摆烂阈值上限,此时同步已无意义
  16. #define AV_NOSYNC_THRESHOLD 10.0
  17. #define AV_SYNC_REJUDGESHOLD 0.01
  18. AVPlayer::AVPlayer()
  19. : m_decoder(new Decoder)
  20. , m_audioFrame(av_frame_alloc())
  21. , m_imageWidth(300)
  22. , m_imageHeight(300)
  23. , m_swrCtx(nullptr)
  24. , m_swsCtx(nullptr)
  25. , m_buffer(nullptr)
  26. , m_audioBuf(nullptr)
  27. , m_duration(0)
  28. , m_volume(30)
  29. , m_exit(0)
  30. , m_pause(0)
  31. , m_playSpeed(1.0)
  32. {}
  33. AVPlayer::~AVPlayer()
  34. {
  35. av_frame_free(&m_audioFrame);
  36. clearPlayer();
  37. delete m_decoder;
  38. if (m_swrCtx)
  39. swr_free(&m_swrCtx);
  40. if (m_swsCtx)
  41. sws_freeContext(m_swsCtx);
  42. if (m_audioBuf)
  43. av_free(m_audioBuf);
  44. if (m_buffer)
  45. av_free(m_buffer);
  46. }
// Starts playback of the media at `url`.
// Returns 1 on success, 0 on failure (decode or SDL init error).
// Tears down any previous session, starts the decoder, opens the SDL
// audio device (which starts the audio callback) and schedules the
// video render task.
int AVPlayer::play(const QString& url)
{
    clearPlayer();
    if (!m_decoder->decode(url)) {
        qDebug() << "decode failed";
        return 0;
    }
    // Decoding succeeded, so the total stream duration is now available.
    m_duration = m_decoder->avDuration();
    emit AVDurationChanged(m_duration);
    m_pause = 0;
    // Force videoCallback() to (re)initialize the A/V clocks.
    m_clockInitFlag = -1;
    if (!initSDL()) {
        qDebug() << "init sdl failed!";
        return 0;
    }
    initVideo();
    return 1;
}
// SDL audio callback: fills `stream` with `len` bytes of mixed PCM.
// Pulls decoded frames from the decoder, resamples them to the SDL
// target format when necessary, applies speed change through the sonic
// stream, mixes into the output and publishes the audio clock / pts.
//
// NOTE(review): `audioPts` is a function-local static, shared across all
// AVPlayer instances using this callback — fine for a single player,
// suspect if several players coexist; confirm.
void fillAStreamCallback(void* userdata, uint8_t* stream, int len)
{
    // SDL_MixAudio requires a silenced destination buffer.
    memset(stream, 0, len);
    AVPlayer* is = (AVPlayer*) userdata;
    static double audioPts = 0.00;
    while (len > 0) {
        if (is->m_exit)
            return;
        // Internal buffer drained: refill it from the next decoded frame.
        if (is->m_audioBufIndex >= is->m_audioBufSize) {
            int ret = is->m_decoder->getAFrame(is->m_audioFrame);
            if (ret) {
                is->m_audioBufIndex = 0;
                // Lazily create the resampler the first time the decoded
                // frame's format/layout/rate/sample-count differs from the
                // SDL target. Once created it is reused for all frames.
                if ((is->m_targetSampleFmt != is->m_audioFrame->format
                     || is->m_targetChannelLayout != (int64_t)ffmpeg_get_frame_channel_layout(is->m_audioFrame)
                     || is->m_targetFreq != is->m_audioFrame->sample_rate
                     || is->m_targetNbSamples != is->m_audioFrame->nb_samples)
                    && !is->m_swrCtx) {
                    is->m_swrCtx = ffmpeg_swr_alloc_set_opts(nullptr,
                                                             is->m_targetChannelLayout,
                                                             is->m_targetSampleFmt,
                                                             is->m_targetFreq,
                                                             (int64_t)ffmpeg_get_frame_channel_layout(is->m_audioFrame),
                                                             (enum AVSampleFormat) is->m_audioFrame->format,
                                                             is->m_audioFrame->sample_rate,
                                                             0,
                                                             nullptr);
                    if (!is->m_swrCtx || swr_init(is->m_swrCtx) < 0) {
                        qDebug() << "swr_init failed";
                        return;
                    }
                }
                if (is->m_swrCtx) {
                    // --- Resampling path ---
                    const uint8_t** in = (const uint8_t**) is->m_audioFrame->extended_data;
                    // Worst-case output sample count, padded by 256 samples
                    // to absorb resampler buffering.
                    int out_count = (uint64_t) is->m_audioFrame->nb_samples * is->m_targetFreq
                                        / is->m_audioFrame->sample_rate
                                    + 256;
                    int out_size = ffmpeg_get_buffer_size(is->m_targetSampleFmt,
                                                          is->m_targetChannels,
                                                          out_count,
                                                          0);
                    if (out_size < 0) {
                        qDebug() << "av_samples_get_buffer_size failed";
                        return;
                    }
                    // Grow the conversion buffer only when needed;
                    // m_audioBufSize is updated to the allocated capacity here
                    // and overwritten with the actual payload size below.
                    av_fast_malloc(&is->m_audioBuf, &is->m_audioBufSize, out_size);
                    if (!is->m_audioBuf) {
                        qDebug() << "av_fast_malloc failed";
                        return;
                    }
                    int len2 = swr_convert(is->m_swrCtx,
                                           &is->m_audioBuf,
                                           out_count,
                                           in,
                                           is->m_audioFrame->nb_samples);
                    if (len2 < 0) {
                        qDebug() << "swr_convert failed";
                        return;
                    }
                    // --- Variable-speed path (sonic time-stretching) ---
                    if (is->m_playSpeed != 1.0) {
                        sonicSetSpeed(is->m_sonicStream, is->m_playSpeed);
                        int ret = sonicWriteShortToStream(is->m_sonicStream,
                                                          (short*) is->m_audioBuf,
                                                          len2);
                        int availSamples = sonicSamplesAvailable(is->m_sonicStream);
                        if (!availSamples) {
                            // Nothing ready yet: mark the buffer as drained so
                            // the next iteration feeds another frame to sonic.
                            is->m_audioBufSize = is->m_audioBufIndex;
                            continue;
                        }
                        int numSamples = availSamples;
                        int bytes = numSamples * is->m_targetChannels
                                    * av_get_bytes_per_sample(is->m_targetSampleFmt);
                        if (bytes > out_size) {
                            av_fast_malloc(&is->m_audioBuf, &is->m_audioBufSize, bytes);
                        }
                        len2 = sonicReadShortFromStream(is->m_sonicStream,
                                                        (short*) is->m_audioBuf,
                                                        numSamples);
                    }
                    // Actual payload size in bytes for the mixing stage.
                    is->m_audioBufSize = len2 * is->m_targetChannels
                                         * av_get_bytes_per_sample(is->m_targetSampleFmt);
                } else {
                    // --- Passthrough path: frame already matches the target ---
                    is->m_audioBufSize = ffmpeg_get_buffer_size(is->m_targetSampleFmt,
                                                                is->m_targetChannels,
                                                                is->m_audioFrame->nb_samples,
                                                                0);
                    av_fast_malloc(&is->m_audioBuf, &is->m_audioBufSize, is->m_audioBufSize + 256);
                    if (!is->m_audioBuf) {
                        qDebug() << "av_fast_malloc failed";
                        return;
                    }
                    memcpy(is->m_audioBuf, is->m_audioFrame->data[0], is->m_audioBufSize);
                }
                // Convert the frame pts to seconds using the stream time base.
                audioPts = is->m_audioFrame->pts
                           * av_q2d(is->m_fmtCtx->streams[is->m_audioIndex]->time_base);
                //qDebug()<<is->m_audioPts;
                av_frame_unref(is->m_audioFrame);
            } else {
                // No frame available: check whether we truly reached EOF.
                if (is->m_decoder->isExit()) {
                    emit is->AVTerminate();
                }
                return;
            }
        }
        // Mix as much of the internal buffer as fits into the SDL stream.
        int len1 = is->m_audioBufSize - is->m_audioBufIndex;
        len1 = (len1 > len ? len : len1);
        SDL_MixAudio(stream, is->m_audioBuf + is->m_audioBufIndex, len1, is->m_volume);
        len -= len1;
        is->m_audioBufIndex += len1;
        stream += len1;
    }
    // Publish the audio (master) clock.
    is->m_audioClock.setClock(audioPts);
    // Progress is displayed in whole seconds, so only emit the pts-changed
    // signal when the integer second advances — avoids flooding the
    // signal/slot connection.
    uint32_t _pts = (uint32_t) audioPts;
    if (is->m_lastAudPts != _pts) {
        emit is->AVPtsChanged(_pts);
        is->m_lastAudPts = _pts;
    }
}
  187. int AVPlayer::initSDL()
  188. {
  189. if (SDL_Init(SDL_INIT_AUDIO) != 0) {
  190. qDebug() << "SDL_Init failed";
  191. return 0;
  192. }
  193. m_exit = 0;
  194. m_audioBufSize = 0;
  195. m_audioBufIndex = 0;
  196. m_lastAudPts = -1;
  197. m_audioCodecPar = m_decoder->audioCodecPar();
  198. m_audioIndex = m_decoder->audioIndex();
  199. m_fmtCtx = m_decoder->formatContext();
  200. SDL_AudioSpec wanted_spec;
  201. // wanted_spec.channels = m_audioCodecPar->channels;
  202. wanted_spec.channels = ffmpeg_get_codec_channels(m_fmtCtx->streams[m_audioIndex]);
  203. wanted_spec.freq = m_audioCodecPar->sample_rate;
  204. wanted_spec.format = AUDIO_S16SYS;
  205. wanted_spec.silence = 0;
  206. wanted_spec.callback = fillAStreamCallback;
  207. wanted_spec.userdata = this;
  208. wanted_spec.samples = m_audioCodecPar->frame_size;
  209. if (SDL_OpenAudio(&wanted_spec, nullptr) < 0) {
  210. qDebug() << "SDL_OpenAudio failed";
  211. return 0;
  212. }
  213. m_targetSampleFmt = AV_SAMPLE_FMT_S16;
  214. // m_targetChannels = m_audioCodecPar->channels;
  215. m_targetChannels = ffmpeg_get_codec_channels(m_fmtCtx->streams[m_audioIndex]);
  216. m_targetFreq = m_audioCodecPar->sample_rate;
  217. m_targetChannelLayout = (int64_t)ffmpeg_get_default_channel_layout(m_targetChannels);
  218. m_targetNbSamples = m_audioCodecPar->frame_size;
  219. m_audioIndex = m_decoder->audioIndex();
  220. m_fmtCtx = m_decoder->formatContext();
  221. m_sonicStream = sonicCreateStream(m_targetFreq, m_targetChannels);
  222. sonicSetQuality(m_sonicStream, 1);
  223. SDL_PauseAudio(0);
  224. return 1;
  225. }
  226. int AVPlayer::initVideo()
  227. {
  228. m_frameTimer = 0.00;
  229. m_videoCodecPar = m_decoder->videoCodecPar();
  230. m_videoIndex = m_decoder->videoIndex();
  231. m_imageWidth = m_videoCodecPar->width;
  232. m_imageHeight = m_videoCodecPar->height;
  233. m_dstPixFmt = AV_PIX_FMT_YUV422P;
  234. m_swsFlags = SWS_BICUBIC;
  235. //分配存储转换后帧数据的buffer内存
  236. int bufSize = av_image_get_buffer_size(m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
  237. m_buffer = (uint8_t*) av_realloc(m_buffer, bufSize * sizeof(uint8_t));
  238. av_image_fill_arrays(m_pixels, m_pitch, m_buffer, m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
  239. //视频帧播放回调递插入线程池任务队列
  240. if (!ThreadPool::addTask(std::bind(&AVPlayer::videoCallback, this, std::placeholders::_1),
  241. std::make_shared<int>(0))) {
  242. qDebug() << "videoCallback add task failed!";
  243. }
  244. return 1;
  245. }
  246. void AVPlayer::pause(bool isPause)
  247. {
  248. if (SDL_GetAudioStatus() == SDL_AUDIO_STOPPED)
  249. return;
  250. if (isPause) {
  251. if (SDL_GetAudioStatus() == SDL_AUDIO_PLAYING) {
  252. SDL_PauseAudio(1);
  253. m_pauseTime = av_gettime_relative() / 1000000.0;
  254. m_pause = 1;
  255. }
  256. } else {
  257. if (SDL_GetAudioStatus() == SDL_AUDIO_PAUSED) {
  258. SDL_PauseAudio(0);
  259. m_frameTimer += av_gettime_relative() / 1000000.0 - m_pauseTime;
  260. m_pause = 0;
  261. }
  262. }
  263. }
  264. void AVPlayer::clearPlayer()
  265. {
  266. if (playState() != AV_STOPPED) {
  267. m_exit = 1;
  268. if (playState() == AV_PLAYING)
  269. SDL_PauseAudio(1);
  270. m_decoder->exit();
  271. SDL_CloseAudio();
  272. if (m_swrCtx)
  273. swr_free(&m_swrCtx);
  274. if (m_swsCtx)
  275. sws_freeContext(m_swsCtx);
  276. m_swrCtx = nullptr;
  277. m_swsCtx = nullptr;
  278. sonicDestroyStream(m_sonicStream);
  279. }
  280. }
// Probes the media at `url` and returns its info without starting playback.
// NOTE(review): ownership of the returned MediaInfo is defined by
// Decoder::detectMediaInfo — presumably decoder-owned; verify before freeing.
AVTool::MediaInfo* AVPlayer::detectMediaInfo(const QString& url)
{
    return m_decoder->detectMediaInfo(url);
}
  285. AVPlayer::PlayState AVPlayer::playState()
  286. {
  287. AVPlayer::PlayState state;
  288. switch (SDL_GetAudioStatus()) {
  289. case SDL_AUDIO_PLAYING:
  290. state = AVPlayer::AV_PLAYING;
  291. break;
  292. case SDL_AUDIO_PAUSED:
  293. state = AVPlayer::AV_PAUSED;
  294. break;
  295. case SDL_AUDIO_STOPPED:
  296. state = AVPlayer::AV_STOPPED;
  297. break;
  298. default:
  299. break;
  300. }
  301. return state;
  302. }
  303. void AVPlayer::initAVClock()
  304. {
  305. m_audioClock.setClock(0.00);
  306. m_videoClock.setClock(0.00);
  307. m_clockInitFlag = 1;
  308. }
// Converts a decoded frame to the display format when needed, emits it
// to the UI via frameChanged, and updates the video clock from its pts.
void AVPlayer::displayImage(AVFrame* frame)
{
    if (frame) {
        // Decide whether a pixel-format/size conversion is required, and
        // lazily create the sws context on the first mismatching frame.
        // NOTE(review): width/height here come from the same codec params
        // used to set m_imageWidth/Height in initVideo(), so in practice
        // only the format test can trigger this — confirm intent.
        if ((m_videoCodecPar->width != m_imageWidth || m_videoCodecPar->height != m_imageHeight
             || m_videoCodecPar->format != m_dstPixFmt)
            && !m_swsCtx) {
            m_swsCtx = sws_getCachedContext(m_swsCtx,
                                            frame->width,
                                            frame->height,
                                            (enum AVPixelFormat) frame->format,
                                            m_imageWidth,
                                            m_imageHeight,
                                            m_dstPixFmt,
                                            m_swsFlags,
                                            nullptr,
                                            nullptr,
                                            nullptr);
        }
        if (m_swsCtx) {
            // Conversion path: scale into the preallocated m_pixels planes
            // and hand the UI a copy-constructed VideoFrame.
            sws_scale(m_swsCtx, frame->data, frame->linesize, 0, frame->height, m_pixels, m_pitch);
            uint8_t* planes[4] = { m_pixels[0], m_pixels[1], m_pixels[2], m_pixels[3] };
            int lines[4] = { m_pitch[0], m_pitch[1], m_pitch[2], m_pitch[3] };
            emit frameChanged(QSharedPointer<VideoFrame>::create(m_dstPixFmt,
                                                                 m_imageWidth,
                                                                 m_imageHeight,
                                                                 planes,
                                                                 lines));
        } else {
            // No conversion needed: use the decoded frame's planes directly.
            emit frameChanged(QSharedPointer<VideoFrame>::create((AVPixelFormat)m_videoCodecPar->format,
                                                                 m_imageWidth,
                                                                 m_imageHeight,
                                                                 frame->data,
                                                                 frame->linesize));
        }
        // Record the video clock (frame pts in seconds via stream time base).
        m_videoClock.setClock(frame->pts * av_q2d(m_fmtCtx->streams[m_videoIndex]->time_base));
    }
}
// Video render loop, run as a thread-pool task (see initVideo).
// Pops frames from the decoder queue, paces them against the audio
// (master) clock via computeTargetDelay, drops late frames, and emits
// each due frame through displayImage(). Exits when m_exit is set.
void AVPlayer::videoCallback(std::shared_ptr<void> par)
{
    double time = 0.00;     // current wall-clock time, seconds
    double duration = 0.00; // nominal duration of the current frame
    double delay = 0.00;    // sync-corrected delay before display
    if (m_clockInitFlag == -1) {
        initAVClock();
    }
    do {
        if (m_exit)
            break;
        if (m_pause) {
            // Paused: idle-poll instead of burning CPU.
            std::this_thread::sleep_for(std::chrono::milliseconds(50));
            continue;
        }
        if (m_decoder->getRemainingVFrame()) {
            MyFrame* lastFrame = m_decoder->peekLastVFrame();
            MyFrame* frame = m_decoder->peekVFrame();
            //qDebug()<<"video pts:"<<frame->pts;
            // Frame belongs to a stale packet serial (e.g. after a seek): drop it.
            if (frame->serial != m_decoder->vidPktSerial()) {
                m_decoder->setNextVFrame();
                continue;
            }
            // Serial changed between consecutive frames: restart the frame
            // timer from "now" so pacing resumes cleanly.
            if (frame->serial != lastFrame->serial)
                m_frameTimer = av_gettime_relative() / 1000000.0;
            duration = vpDuration(lastFrame, frame);
            delay = computeTargetDelay(duration);
            time = av_gettime_relative() / 1000000.0;
            //qDebug()<<"delay:"<<delay<<endl;
            // Display time not reached yet: sleep a bounded slice, then re-judge.
            if (time < m_frameTimer + delay) {
                QThread::msleep(
                    (uint32_t) (FFMIN(AV_SYNC_REJUDGESHOLD, m_frameTimer + delay - time) * 1000));
                continue;
            }
            m_frameTimer += delay;
            // Timer fell too far behind the wall clock: resynchronize it.
            if (time - m_frameTimer > AV_SYNC_THRESHOLD_MAX)
                m_frameTimer = time;
            // Drop-frame check only when more than one frame is queued.
            // Skipped at non-1.0 speed and during frame-stepping: dropping
            // too aggressively there makes the UI stutter.
            if (m_playSpeed == 1.0 && m_decoder->getRemainingVFrame() > 1) {
                MyFrame* nextFrame = m_decoder->peekNextVFrame();
                duration = nextFrame->pts - frame->pts;
                // Master clock already past this frame's display window:
                // discard it immediately.
                if (time > m_frameTimer + duration) {
                    m_decoder->setNextVFrame();
                    qDebug() << "abandon vframe" << Qt::endl;
                    continue;
                }
            }
            displayImage(&frame->frame);
            // Advance the queue read index.
            m_decoder->setNextVFrame();
        } else {
            // Queue empty: brief sleep before polling again.
            QThread::msleep(10);
        }
    } while (true);
    //qDebug()<<"videoCallBack exit"<<endl;
}
  408. double AVPlayer::computeTargetDelay(double delay)
  409. {
  410. //视频当前显示帧与当前播放音频帧时间戳差值
  411. double diff = m_videoClock.getClock() - m_audioClock.getClock();
  412. //计算同步阈值
  413. double sync = FFMAX(AV_SYNC_THRESHOLD_MIN, FFMIN(AV_SYNC_THRESHOLD_MAX, delay));
  414. //不同步时间超过阈值直接放弃同步
  415. if (!isnan(diff) && fabs(diff) < AV_NOSYNC_THRESHOLD) {
  416. if (diff
  417. <= -sync) { //视频已落后音频大于一帧的显示时长,delay值应为0,立马将当前帧显示追赶音频
  418. delay = FFMAX(0, diff + delay);
  419. } else if (diff >= sync
  420. && delay
  421. > AV_SYNC_FRAMEDUP_THRESHOLD) { //视频超前音频大于一个视频帧的时间,延时一个视频帧时间+已超时时间,下次判定将至少被延时到下个将要显示的视频帧pts
  422. delay = diff + delay;
  423. } else if (diff >= sync) { //高帧率视频直接延时两个视频帧时间;;;;
  424. delay = 2 * delay;
  425. }
  426. }
  427. return delay;
  428. }
  429. double AVPlayer::vpDuration(MyFrame* lastFrame, MyFrame* curFrame)
  430. {
  431. if (curFrame->serial == lastFrame->serial) {
  432. double duration = curFrame->pts - lastFrame->pts;
  433. if (isnan(duration) || duration > AV_NOSYNC_THRESHOLD)
  434. return lastFrame->duration;
  435. else
  436. return duration;
  437. } else {
  438. return 0.00;
  439. }
  440. }