video_play_thread.cpp 20 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576
  1. // ***********************************************************/
  2. // video_play_thread.cpp
  3. //
// Copyright @ Steven Huang. All rights reserved.
  5. //
  6. // Video play thread. This section includes key code for
  7. // synchronizing video frames and audio using pts/dts,
  8. // as well as subtitle processing.
  9. // ***********************************************************/
  10. #include "video_play_thread.h"
  11. #include "AVPlayer2/playercontroller.h"
  12. #include <QLoggingCategory>
// Logging category for this thread; enable/disable via QT_LOGGING_RULES.
Q_LOGGING_CATEGORY(playerControllerVideoPlayThread, "player.controller.VideoPlayThread")
// Frame-drop policy shared with the rest of the player (ffplay-style:
// >0 drop late frames, nonzero drops only when video is not the master clock
// — see the check in video_refresh()).
extern int framedrop;
// Matches ASS override-tag blocks such as "{\pos(10,10)}" so they can be
// stripped from the dialogue text.
const QRegularExpression VideoPlayThread::m_assFilter = QRegularExpression("{\\\\.*?}");
// Matches the ASS line-break escapes "\n" and "\N" for replacement with
// real newlines.
const QRegularExpression VideoPlayThread::m_assNewLineReplacer = QRegularExpression("\\\\n|\\\\N");
// Allow AVFrame* to travel through Qt's meta-type system (queued signals).
Q_DECLARE_METATYPE(AVFrame*)
  18. VideoPlayThread::VideoPlayThread(VideoState* pState)
  19. : m_pState(pState)
  20. {
  21. qRegisterMetaType<AVFrame*>();
  22. }
  23. VideoPlayThread::~VideoPlayThread()
  24. {
  25. final_resample_param();
  26. }
  27. void VideoPlayThread::run()
  28. {
  29. //qCDebug(playerControllerVideoPlayThread) << "[VideoPlayThread] run start, m_pState:" << (void*)m_pState;
  30. assert(m_pState);
  31. VideoState* is = m_pState;
  32. // qCDebug(playerControllerVideoPlayThread) << "[VideoPlayThread] VideoState* is:" << (void*)is << ", abort_request:" << is->abort_request;
  33. double remaining_time = 0.0;
  34. int loop_count = 0;
  35. for (;;) {
  36. if (isExit()) {
  37. // qCDebug(playerControllerVideoPlayThread) << "[VideoPlayThread] m_exit set, exit.";
  38. break;
  39. }
  40. if (is->abort_request) {
  41. // qCDebug(playerControllerVideoPlayThread) << "[VideoPlayThread] abort_request set, exit.";
  42. break;
  43. }
  44. if (is->eof && frame_queue_nb_remaining(&is->pictq) == 0) {
  45. // qCDebug(playerControllerVideoPlayThread) << "[VideoPlayThread] eof且队列空,退出线程";
  46. break; // 线程退出
  47. }
  48. if (is->paused) {
  49. // qCDebug(playerControllerVideoPlayThread) << "[VideoPlayThread] paused, wait.";
  50. std::unique_lock<std::mutex> lock(m_mutex);
  51. m_cv.wait_for(lock, std::chrono::milliseconds(10), [this] { return isExit(); });
  52. continue;
  53. }
  54. // qCDebug(playerControllerVideoPlayThread) << "[VideoPlayThread] call video_refresh, loop:" << loop_count << ", pictq size:" << (is ? is->pictq.size : -1) << ", remaining_time:" << remaining_time;
  55. if (remaining_time > 0.0)
  56. av_usleep((int64_t) (remaining_time * 1000000.0));
  57. remaining_time = REFRESH_RATE;
  58. if ((!is->paused || is->force_refresh)) {
  59. // qCDebug(playerControllerVideoPlayThread) << "[VideoPlayThread] call video_refresh, loop:" << loop_count++;
  60. video_refresh(is, &remaining_time);
  61. }
  62. }
  63. //qCDebug(playerControllerVideoPlayThread) << "[VideoPlayThread] run end, abort_request:" << is->abort_request
  64. // << ", m_exit:" << (m_exit ? m_exit->load() : -1);
  65. }
/// Core A/V-sync step, ported from ffplay's video_refresh(): shows the next
/// picture if its presentation time has arrived, drops late frames, retires
/// stale subtitles, and shrinks *remaining_time so the caller (run()) wakes
/// up in time for the next frame.
void VideoPlayThread::video_refresh(VideoState* is, double* remaining_time)
{
    double time;
    Frame *sp, *sp2;
    if (!is->paused && get_master_sync_type(is) == AV_SYNC_EXTERNAL_CLOCK && is->realtime)
        check_external_clock_speed(is);
    // Periodically refresh the on-screen time watermark (once per second).
    static double last_watermark_update = 0.0;
    double current_time_sec = av_gettime_relative() / 1000000.0;
    if (current_time_sec - last_watermark_update >= 1.0) {
        set_video_watermark(is, "Time");
        last_watermark_update = current_time_sec;
    }
    if (is->video_st) {
    retry:
        // ffplay approach: at EOF with only the final frame remaining,
        // consume that frame and return.
        if (is->eof && frame_queue_nb_remaining(&is->pictq) == 1) {
            frame_queue_next(&is->pictq);
            // A "playback finished" signal could be emitted here.
            return;
        }
        if (frame_queue_nb_remaining(&is->pictq) == 0) {
            // nothing to do, no picture to display in the queue
            *remaining_time = REFRESH_RATE;
        } else {
            double last_duration, duration, delay;
            Frame *vp, *lastvp;
            /* dequeue the picture */
            lastvp = frame_queue_peek_last(&is->pictq);
            vp = frame_queue_peek(&is->pictq);
            // Frames from a stale packet serial (pre-seek) are discarded.
            if (vp->serial != is->videoq.serial) {
                frame_queue_next(&is->pictq);
                goto retry;
            }
            // Serial changed between frames: restart the frame timer.
            if (lastvp->serial != vp->serial)
                is->frame_timer = av_gettime_relative() / 1000000.0;
            if (is->paused)
                goto display;
            /* compute nominal last_duration */
            last_duration = vp_duration(is, lastvp, vp);
            delay = compute_target_delay(last_duration, is);
            time = av_gettime_relative() / 1000000.0;
            // Too early to show vp: only sleep until it is due.
            if (time < is->frame_timer + delay) {
                *remaining_time = FFMIN(is->frame_timer + delay - time, *remaining_time);
                goto display;
            }
            is->frame_timer += delay;
            // Fallen far behind: resynchronize the frame timer to "now".
            if (delay > 0 && time - is->frame_timer > AV_SYNC_THRESHOLD_MAX)
                is->frame_timer = time;
            is->pictq.mutex->lock();
            if (!isnan(vp->pts))
                update_video_pts(is, vp->pts, vp->pos, vp->serial);
            is->pictq.mutex->unlock();
            // Late-frame dropping: if the frame after vp is already due,
            // skip vp entirely (policy controlled by framedrop).
            if (frame_queue_nb_remaining(&is->pictq) > 1) {
                Frame* nextvp = frame_queue_peek_next(&is->pictq);
                duration = vp_duration(is, vp, nextvp);
                if (!is->step
                    && (framedrop > 0
                        || (framedrop && get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER))
                    && time > is->frame_timer + duration) {
                    is->frame_drops_late++;
                    frame_queue_next(&is->pictq);
                    goto retry;
                }
            }
            // Retire subtitles that are stale: wrong serial, past their end
            // time, or superseded by the next subtitle's start time.
            if (is->subtitle_st) {
                while (frame_queue_nb_remaining(&is->subpq) > 0) {
                    sp = frame_queue_peek(&is->subpq);
                    if (frame_queue_nb_remaining(&is->subpq) > 1)
                        sp2 = frame_queue_peek_next(&is->subpq);
                    else
                        sp2 = nullptr;
                    if (sp->serial != is->subtitleq.serial
                        || (is->vidclk.pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
                        || (sp2
                            && is->vidclk.pts
                                   > (sp2->pts + ((float) sp2->sub.start_display_time / 1000)))) {
#if 0
                        // Disabled leftover from ffplay: clearing the SDL
                        // subtitle texture when a subtitle is retired.
                        if (sp->uploaded) {
                            int i;
                            for (i = 0; i < sp->sub.num_rects; i++) {
                                AVSubtitleRect* sub_rect = sp->sub.rects[i];
                                /*uint8_t* pixels;
                                int pitch, j;
                                if (!SDL_LockTexture(is->sub_texture, (SDL_Rect*)sub_rect, (void**)&pixels, &pitch)) {
                                    for (j = 0; j < sub_rect->h; j++, pixels += pitch)
                                        memset(pixels, 0, sub_rect->w << 2);
                                    SDL_UnlockTexture(is->sub_texture);
                                }*/
                            }
                        }
#endif
                        frame_queue_next(&is->subpq);
                    } else {
                        break;
                    }
                }
            }
            frame_queue_next(&is->pictq);
            is->force_refresh = 1;
            // Single-step mode: display one frame, then pause again.
            if (is->step && !is->paused)
                toggle_pause(is, !is->step);
        }
    display:
        /* display picture */
        if (is->force_refresh && is->pictq.rindex_shown)
            video_display(is);
    }
    is->force_refresh = 0;
}
  176. void VideoPlayThread::video_display(VideoState* is)
  177. {
  178. if (is->audio_st && false) {
  179. // video_audio_display(is);
  180. } else if (is->video_st) {
  181. video_image_display(is);
  182. }
  183. }
// NOTE: The entire function below is compiled out (#if 0). It is a partial
// port of ffplay's video_audio_display() — waveform / RDFT spectrum
// visualization rendered through SDL — kept for reference only. The SDL
// rendering half is additionally wrapped in a nested #if 0.
#if 0
void VideoPlayThread::video_audio_display(VideoState* s)
{
    int64_t audio_callback_time = 0;
    int i, i_start, x, y1, y, ys, delay, n, nb_display_channels;
    int ch, channels, h, h2;
    int64_t time_diff;
    int rdft_bits, nb_freq;
    // Smallest rdft_bits such that 2^rdft_bits >= 2 * height.
    for (rdft_bits = 1; (1 << rdft_bits) < 2 * s->height; rdft_bits++)
        ;
    nb_freq = 1 << (rdft_bits - 1);
    /* compute display index : center on currently output samples */
    channels = s->audio_tgt.channels;
    nb_display_channels = channels;
    if (!s->paused) {
        int data_used = (2 * nb_freq);
        n = 2 * channels;
        delay = s->audio_write_buf_size;
        delay /= n;
        /* to be more precise, we take into account the time spent since
        the last buffer computation */
        if (audio_callback_time) {
            time_diff = av_gettime_relative() - audio_callback_time;
            delay -= (time_diff * s->audio_tgt.freq) / 1000000;
        }
        delay += 2 * data_used;
        if (delay < data_used)
            delay = data_used;
        i_start = x = compute_mod(s->sample_array_index - delay * channels, SAMPLE_ARRAY_SIZE);
        /*if (s->show_mode == SHOW_MODE_WAVES) {
        h = INT_MIN;
        for (i = 0; i < 1000; i += channels) {
        int idx = (SAMPLE_ARRAY_SIZE + x - i) % SAMPLE_ARRAY_SIZE;
        int a = s->sample_array[idx];
        int b = s->sample_array[(idx + 4 * channels) % SAMPLE_ARRAY_SIZE];
        int c = s->sample_array[(idx + 5 * channels) % SAMPLE_ARRAY_SIZE];
        int d = s->sample_array[(idx + 9 * channels) % SAMPLE_ARRAY_SIZE];
        int score = a - d;
        if (h < score && (b ^ c) < 0) {
        h = score;
        i_start = idx;
        }
        }
        }*/
        s->last_i_start = i_start;
    }
    else {
        i_start = s->last_i_start;
    }
#if 0
    if (s->show_mode == SHOW_MODE_WAVES) {
        SDL_SetRenderDrawColor(renderer, 255, 255, 255, 255);
        /* total height for one channel */
        h = s->height / nb_display_channels;
        /* graph height / 2 */
        h2 = (h * 9) / 20;
        for (ch = 0; ch < nb_display_channels; ch++) {
            i = i_start + ch;
            y1 = s->ytop + ch * h + (h / 2); /* position of center line */
            for (x = 0; x < s->width; x++) {
                y = (s->sample_array[i] * h2) >> 15;
                if (y < 0) {
                    y = -y;
                    ys = y1 - y;
                }
                else {
                    ys = y1;
                }
                fill_rectangle(s->xleft + x, ys, 1, y);
                i += channels;
                if (i >= SAMPLE_ARRAY_SIZE)
                    i -= SAMPLE_ARRAY_SIZE;
            }
        }
        SDL_SetRenderDrawColor(renderer, 0, 0, 255, 255);
        for (ch = 1; ch < nb_display_channels; ch++) {
            y = s->ytop + ch * h;
            fill_rectangle(s->xleft, y, s->width, 1);
        }
    }
    else {
        if (realloc_texture(&s->vis_texture, SDL_PIXELFORMAT_ARGB8888, s->width, s->height, SDL_BLENDMODE_NONE, 1) < 0)
            return;
        if (s->xpos >= s->width)
            s->xpos = 0;
        nb_display_channels = FFMIN(nb_display_channels, 2);
        if (rdft_bits != s->rdft_bits) {
            av_rdft_end(s->rdft);
            av_free(s->rdft_data);
            s->rdft = av_rdft_init(rdft_bits, DFT_R2C);
            s->rdft_bits = rdft_bits;
            s->rdft_data = av_malloc_array(nb_freq, 4 * sizeof(*s->rdft_data));
        }
        if (!s->rdft || !s->rdft_data) {
            av_log(nullptr, AV_LOG_ERROR, "Failed to allocate buffers for RDFT, switching to waves display\n");
            s->show_mode = SHOW_MODE_WAVES;
        }
        else {
            FFTSample* data[2];
            SDL_Rect rect = { .x = s->xpos, .y = 0, .w = 1, .h = s->height };
            uint32_t* pixels;
            int pitch;
            for (ch = 0; ch < nb_display_channels; ch++) {
                data[ch] = s->rdft_data + 2 * nb_freq * ch;
                i = i_start + ch;
                for (x = 0; x < 2 * nb_freq; x++) {
                    double w = (x - nb_freq) * (1.0 / nb_freq);
                    data[ch][x] = s->sample_array[i] * (1.0 - w * w);
                    i += channels;
                    if (i >= SAMPLE_ARRAY_SIZE)
                        i -= SAMPLE_ARRAY_SIZE;
                }
                av_rdft_calc(s->rdft, data[ch]);
            }
            /* Least efficient way to do this, we should of course
            * directly access it but it is more than fast enough. */
            if (!SDL_LockTexture(s->vis_texture, &rect, (void**)&pixels, &pitch)) {
                pitch >>= 2;
                pixels += pitch * s->height;
                for (y = 0; y < s->height; y++) {
                    double w = 1 / sqrt(nb_freq);
                    int a = sqrt(w * sqrt(data[0][2 * y + 0] * data[0][2 * y + 0] + data[0][2 * y + 1] * data[0][2 * y + 1]));
                    int b = (nb_display_channels == 2) ? sqrt(w * hypot(data[1][2 * y + 0], data[1][2 * y + 1]))
                                                      : a;
                    a = FFMIN(a, 255);
                    b = FFMIN(b, 255);
                    pixels -= pitch;
                    *pixels = (a << 16) + (b << 8) + ((a + b) >> 1);
                }
                SDL_UnlockTexture(s->vis_texture);
            }
            SDL_RenderCopy(renderer, s->vis_texture, nullptr, nullptr);
        }
        if (!s->paused)
            s->xpos++;
    }
#endif
}
#endif
/// Renders the most recently shown picture: extracts any subtitle text that
/// is due (ASS dialogue is parsed and forwarded to the subtitle callback),
/// converts the decoded frame to RGB24 through the pre-built SwsContext,
/// and hands the RGB frame to the registered frame callback.
void VideoPlayThread::video_image_display(VideoState* is)
{
    Frame* sp = nullptr;
    Frame* vp = frame_queue_peek_last(&is->pictq);
    Video_Resample* pResample = &m_Resample;
    if (frame_queue_nb_remaining(&is->subpq) > 0) {
        sp = frame_queue_peek(&is->subpq);
        // Process the subtitle only once its start display time is reached.
        if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000)) {
            if (!sp->uploaded) {
                // uint8_t* pixels[4];
                // int pitch[4];
                if (!sp->width || !sp->height) {
                    // Fall back to the video frame's dimensions.
                    sp->width = vp->width;
                    sp->height = vp->height;
                }
                // if (realloc_texture(&is->sub_texture, SDL_PIXELFORMAT_ARGB8888,
                // sp->width, sp->height, SDL_BLENDMODE_BLEND, 1) < 0) return;
#if 1
                for (unsigned int i = 0; i < sp->sub.num_rects; i++) {
                    AVSubtitleRect* sub_rect = sp->sub.rects[i];
                    if (sub_rect->type == SUBTITLE_ASS) {
                        qDebug("subtitle[%d], format:%d, type:%d, text:%s, flags:%d",
                               i,
                               sp->sub.format,
                               sub_rect->type,
                               sub_rect->text,
                               sub_rect->flags);
                        // QString ass = QString::fromUtf8(sub_rect->ass);
                        // NOTE(review): this local-8-bit -> UTF-8 round trip
                        // looks like an encoding workaround; presumably the
                        // ASS payload is not plain UTF-8 on this platform —
                        // verify, since fromUtf8 alone may suffice.
                        QString ass = QString::fromLocal8Bit(
                            QString::fromStdString(sub_rect->ass).toUtf8());
                        // ASS "Dialogue" lines are comma-separated; field 9
                        // (index 8) onward is the dialogue text.
                        QStringList assList = ass.split(",");
                        if (assList.size() > 8) {
                            ass = assList[8];
                            qDebug("ass: %s", qUtf8Printable(ass));
                            parse_subtitle_ass(ass);
                        }
                    } else {
                        // Bitmap subtitles etc. are not handled on this path.
                        qWarning("not handled yet, type:%d", sub_rect->type);
                    }
                }
#else
                // Disabled bitmap-subtitle path (PAL8 -> BGRA conversion
                // originally targeting an SDL texture).
                for (i = 0; i < sp->sub.num_rects; i++) {
                    AVSubtitleRect* sub_rect = sp->sub.rects[i];
                    sub_rect->x = av_clip(sub_rect->x, 0, sp->width);
                    sub_rect->y = av_clip(sub_rect->y, 0, sp->height);
                    sub_rect->w = av_clip(sub_rect->w, 0, sp->width - sub_rect->x);
                    sub_rect->h = av_clip(sub_rect->h, 0, sp->height - sub_rect->y);
                    is->sub_convert_ctx = sws_getCachedContext(is->sub_convert_ctx,
                                                               sub_rect->w,
                                                               sub_rect->h,
                                                               AV_PIX_FMT_PAL8,
                                                               sub_rect->w,
                                                               sub_rect->h,
                                                               AV_PIX_FMT_BGRA,
                                                               0,
                                                               nullptr,
                                                               nullptr,
                                                               nullptr);
                    if (!is->sub_convert_ctx) {
                        av_log(nullptr, AV_LOG_FATAL, "Cannot initialize the conversion context\n");
                        return;
                    }
#if 1
                    sws_scale(is->sub_convert_ctx,
                              (const uint8_t* const*) sub_rect->data,
                              sub_rect->linesize,
                              0,
                              sub_rect->h,
                              pixels,
                              pitch);
#else
                    if (!SDL_LockTexture(is->sub_texture,
                                         (SDL_Rect*) sub_rect,
                                         (void**) pixels,
                                         pitch)) {
                        sws_scale(is->sub_convert_ctx,
                                  (const uint8_t* const*) sub_rect->data,
                                  sub_rect->linesize,
                                  0,
                                  sub_rect->h,
                                  pixels,
                                  pitch);
                        SDL_UnlockTexture(is->sub_texture);
                    }
#endif
                }
#endif
                sp->uploaded = 1;
            }
        } else {
            sp = nullptr;
        }
    }
    AVFrame* pFrameRGB = pResample->pFrameRGB; // dst
    AVCodecContext* pVideoCtx = is->viddec.avctx;
    AVFrame* pFrame = vp->frame;
    // AVPixelFormat fmt = (AVPixelFormat)pFrame->format; // 0
    // const char* fmt_name = av_get_pix_fmt_name(fmt);
    // AVHWFramesContext* ctx =
    // (AVHWFramesContext*)pVideoCtx->hw_frames_ctx->data; AVPixelFormat sw_fmt =
    // ctx->sw_format;
    // qDebug("frame w:%d,h:%d, pts:%lld, dts:%lld", pVideoCtx->width,
    // pVideoCtx->height, pFrame->pts, pFrame->pkt_dts);
    // TODO: avoid this conversion when the frame is already in a usable format.
    sws_scale(pResample->sws_ctx,
              (uint8_t const* const*) pFrame->data,
              pFrame->linesize,
              0,
              pVideoCtx->height,
              pFrameRGB->data,
              pFrameRGB->linesize);
    // QImage img(pVideoCtx->width, pVideoCtx->height, QImage::Format_RGB888);
    // for (int y = 0; y < pVideoCtx->height; ++y) {
    // memcpy(img.scanLine(y),
    // pFrameRGB->data[0] + y * pFrameRGB->linesize[0],
    // pVideoCtx->width * 3);
    // }
    // emit frame_ready(img);
    // Deliver the converted frame; pFrameRGB is owned by m_Resample, so the
    // callback must not free it and must use it before the next conversion.
    if (m_onFrameReady) m_onFrameReady(pFrameRGB);
}
  443. bool VideoPlayThread::init_resample_param(AVCodecContext* pVideo, bool bHardware)
  444. {
  445. Video_Resample* pResample = &m_Resample;
  446. if (pVideo) {
  447. enum AVPixelFormat pix_fmt = pVideo->pix_fmt; // frame format after decode
  448. if (bHardware)
  449. pix_fmt = AV_PIX_FMT_NV12;
  450. struct SwsContext* sws_ctx
  451. = sws_getContext(pVideo->width,
  452. pVideo->height,
  453. pix_fmt, // AV_PIX_FMT_YUV420P
  454. pVideo->width,
  455. pVideo->height,
  456. AV_PIX_FMT_RGB24, // sws_scale destination color scheme
  457. SWS_BILINEAR,
  458. nullptr,
  459. nullptr,
  460. nullptr);
  461. AVFrame* pFrameRGB = av_frame_alloc();
  462. if (!pFrameRGB) {
  463. printf("Could not allocate rgb frame.\n");
  464. return false;
  465. }
  466. pFrameRGB->width = pVideo->width;
  467. pFrameRGB->height = pVideo->height;
  468. pFrameRGB->format = AV_PIX_FMT_RGB24;
  469. int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB24, pVideo->width, pVideo->height, 32);
  470. uint8_t* const buffer_RGB = (uint8_t*) av_malloc(numBytes * sizeof(uint8_t));
  471. if (!buffer_RGB) {
  472. printf("Could not allocate buffer.\n");
  473. return false;
  474. }
  475. av_image_fill_arrays(pFrameRGB->data,
  476. pFrameRGB->linesize,
  477. buffer_RGB,
  478. AV_PIX_FMT_RGB24,
  479. pVideo->width,
  480. pVideo->height,
  481. 32);
  482. pResample->sws_ctx = sws_ctx;
  483. pResample->pFrameRGB = pFrameRGB;
  484. pResample->buffer_RGB = buffer_RGB;
  485. return true;
  486. }
  487. return false;
  488. }
  489. void VideoPlayThread::final_resample_param()
  490. {
  491. Video_Resample* pResample = &m_Resample;
  492. // Free video resample context
  493. sws_freeContext(pResample->sws_ctx);
  494. // Free the RGB image
  495. av_free(pResample->buffer_RGB);
  496. av_frame_free(&pResample->pFrameRGB);
  497. av_free(pResample->pFrameRGB);
  498. }
  499. void VideoPlayThread::parse_subtitle_ass(const QString& text)
  500. {
  501. QString str = text;
  502. str.remove(m_assFilter);
  503. str.replace(m_assNewLineReplacer, "\n");
  504. str = str.trimmed();
  505. // emit subtitle_ready(str);
  506. if (m_onSubtitleReady) m_onSubtitleReady(str);
  507. }