video_state.cpp 24 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798
  1. #include "video_state.h"
// File-scope playback state.  The first two are non-static (external linkage),
// so they may be declared extern in other translation units — TODO confirm.
int infinite_buffer = -1;            // <0 = auto: open_media() sets it to 1 for realtime sources
int64_t start_time = AV_NOPTS_VALUE; // optional initial seek target, AV_TIME_BASE units
// Hardware pixel format selected in open_hardware(); read back by get_hw_format().
static enum AVPixelFormat hw_pix_fmt;
/// Construct the state holder; the VideoState itself is created later by
/// create_video_state().
/// @param use_hardware attempt hardware-accelerated video decoding when opening streams
/// @param loop_play    value copied into VideoState::loop when the stream is opened
VideoStateData::VideoStateData(bool use_hardware, bool loop_play)
    : m_bUseHardware(use_hardware)
    , m_bLoopPlay(loop_play)
{}
/// Tear down: release the hardware device context first, then close and free
/// the VideoState (streams, queues, decoders).
VideoStateData::~VideoStateData()
{
    close_hardware();
    delete_video_state();
}
  14. void VideoStateData::delete_video_state()
  15. {
  16. if (m_pState) {
  17. stream_close(m_pState);
  18. m_pState = nullptr;
  19. }
  20. }
/// Borrowing accessor: ownership of the VideoState stays with this object.
/// May return nullptr before create_video_state() succeeds.
VideoState* VideoStateData::get_state() const
{
    return m_pState;
}
/// True only after a hardware decoder was successfully opened for the video
/// stream (set in stream_component_open()).
bool VideoStateData::is_hardware_decode() const
{
    return m_bHardwareSuccess;
}
  29. int VideoStateData::create_video_state(const char* filename)
  30. {
  31. int ret = -1;
  32. if (!filename || !filename[0]) {
  33. qDebug("filename is invalid, please select a valid media file.");
  34. return ret;
  35. }
  36. m_pState = stream_open(filename);
  37. if (!m_pState) {
  38. qDebug("stream_open failed!");
  39. return ret;
  40. }
  41. return open_media(m_pState);
  42. }
  43. void VideoStateData::print_state() const
  44. {
  45. if (const auto is = m_pState) {
  46. qDebug("[VideoState]PacketQueue(v:%p,a:%p,s:%p)", &is->videoq, &is->audioq, &is->subtitleq);
  47. qDebug("[VideoState]FrameQueue(v:%p,a:%p,s:%p)", &is->pictq, &is->sampq, &is->subpq);
  48. qDebug("[VideoState]Decoder(v:%p,a:%p,s:%p)", &is->viddec, &is->auddec, &is->subdec);
  49. qDebug("[VideoState]Clock(v:%p,a:%p,s:%p)", &is->vidclk, &is->audclk, &is->extclk);
  50. }
  51. }
  52. int decode_interrupt_cb(void* ctx)
  53. {
  54. VideoState* is = (VideoState*) ctx;
  55. return is->abort_request;
  56. }
/* Open and probe the media behind is->filename, choose the best
 * video/audio/subtitle streams and open a decoder for each.
 * Returns 0 on success, negative on failure.  On failure the
 * AVFormatContext is freed here only while ownership has not yet been
 * handed to is->ic; after that, stream_close() is responsible for it. */
int VideoStateData::open_media(VideoState* is)
{
    assert(is);
    int err;
    uint i;
    int ret = -1;
    int st_index[AVMEDIA_TYPE_NB];  // chosen stream index per media type, -1 = none yet
    AVFormatContext* ic = nullptr;
    const char* wanted_stream_spec[AVMEDIA_TYPE_NB] = {0}; // explicit stream specifiers (all null here)
    memset(st_index, -1, sizeof(st_index));
    is->eof = 0;
    ic = avformat_alloc_context();
    if (!ic) {
        av_log(nullptr, AV_LOG_FATAL, "Could not allocate context.\n");
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    // Let FFmpeg poll decode_interrupt_cb() during blocking I/O so that
    // setting is->abort_request cancels the open/read.
    ic->interrupt_callback.callback = decode_interrupt_cb;
    ic->interrupt_callback.opaque = is;
    err = avformat_open_input(&ic, is->filename, is->iformat, nullptr);
    if (err < 0) {
        av_log(nullptr, AV_LOG_FATAL, "failed to open %s: %d", is->filename, err);
        ret = -1;
        goto fail;
    }
    // Kept from ffplay for reference — option dictionaries are not used here.
    // if (!av_dict_get(format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE)) {
    // av_dict_set(&format_opts, "scan_all_pmts", "1", AV_DICT_DONT_OVERWRITE);
    // scan_all_pmts_set = 1;
    // }
    // err = avformat_open_input(&ic, is->filename, is->iformat, &format_opts);
    // if (err < 0) {
    // print_error(is->filename, err);
    // ret = -1;
    // goto fail;
    // }
    // if (scan_all_pmts_set)
    // av_dict_set(&format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE);
    // remove_avoptions(&format_opts, codec_opts);
    // ret = check_avoptions(format_opts);
    // if (ret < 0)
    // goto fail;
    is->ic = ic; // ownership transferred: from here on stream_close() frees it
    // if (genpts)
    // ic->flags |= AVFMT_FLAG_GENPTS;
    // if (find_stream_info) {
    // AVDictionary **opts;
    // int orig_nb_streams = ic->nb_streams;
    // err = setup_find_stream_info_opts(ic, codec_opts, &opts);
    // if (err < 0) {
    // av_log(NULL, AV_LOG_ERROR,
    // "Error setting up avformat_find_stream_info() options\n");
    // ret = err;
    // goto fail;
    // }
    // err = avformat_find_stream_info(ic, opts);
    // for (i = 0; i < orig_nb_streams; i++)
    // av_dict_free(&opts[i]);
    // av_freep(&opts);
    // if (err < 0) {
    // av_log(NULL, AV_LOG_WARNING,
    // "%s: could not find codec parameters\n", is->filename);
    // ret = -1;
    // goto fail;
    // }
    // }
    av_format_inject_global_side_data(ic);
    err = avformat_find_stream_info(ic, nullptr);
    if (err < 0) {
        av_log(nullptr, AV_LOG_WARNING, "%s: could not find codec parameters\n", is->filename);
        ret = -1;
        goto fail;
    }
    if (ic->pb) {
        // FIXME hack, ffplay maybe should not use avio_feof() to test for the end
        ic->pb->eof_reached = 0;
    }
    // NOTE(review): seek_by_bytes is defined elsewhere; < 0 means "auto-detect".
    if (seek_by_bytes < 0)
        seek_by_bytes = !(ic->iformat->flags & AVFMT_NO_BYTE_SEEK)
                        && !!(ic->iformat->flags & AVFMT_TS_DISCONT)
                        && strcmp("ogg", ic->iformat->name);
    // is->max_frame_duration = (ic->iformat->flags & AVFMT_TS_DISCONT) ? 10.0 : 3600.0;
    is->max_frame_duration = 2.0; // deliberately lower than ffplay's 10.0/3600.0 defaults (see above)
    /* if seeking requested, we execute it */
    if (start_time != AV_NOPTS_VALUE) {
        int64_t timestamp;
        timestamp = start_time;
        /* add the stream start time */
        if (ic->start_time != AV_NOPTS_VALUE)
            timestamp += ic->start_time;
        ret = avformat_seek_file(ic, -1, INT64_MIN, timestamp, INT64_MAX, 0);
        if (ret < 0) {
            // Seek failure is not fatal — playback continues from the start.
            av_log(NULL,
                   AV_LOG_WARNING,
                   "%s: could not seek to position %0.3f\n",
                   is->filename,
                   (double) timestamp / AV_TIME_BASE);
        }
    }
    is->realtime = is_realtime(ic);
    // if (show_status)
    av_dump_format(ic, 0, is->filename, 0);
    // First pass: honour explicit stream specifiers (none are set in this build,
    // so this loop only resets every stream to AVDISCARD_ALL).
    for (i = 0; i < ic->nb_streams; i++) {
        AVStream* st = ic->streams[i];
        enum AVMediaType type = st->codecpar->codec_type;
        st->discard = AVDISCARD_ALL; // discard packets until a component is opened
        if (type >= 0 && wanted_stream_spec[type] && st_index[type] == -1)
            if (avformat_match_stream_specifier(ic, st, wanted_stream_spec[type]) > 0)
                st_index[type] = i;
    }
    for (i = 0; i < AVMEDIA_TYPE_NB; i++) {
        if (wanted_stream_spec[i] && st_index[i] == -1) {
            av_log(nullptr,
                   AV_LOG_ERROR,
                   "Stream specifier %s does not match any %s stream\n",
                   wanted_stream_spec[i],
                   av_get_media_type_string(AVMediaType(i)));
            st_index[i] = INT_MAX; // poison value: av_find_best_stream() won't select it
        }
    }
    // Second pass: let FFmpeg pick the best stream of each kind, preferring
    // audio/subtitles related to the chosen video stream.
    // if (!video_disable)
    st_index[AVMEDIA_TYPE_VIDEO]
        = av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO, st_index[AVMEDIA_TYPE_VIDEO], -1, NULL, 0);
    // if (!audio_disable)
    st_index[AVMEDIA_TYPE_AUDIO] = av_find_best_stream(ic,
                                                       AVMEDIA_TYPE_AUDIO,
                                                       st_index[AVMEDIA_TYPE_AUDIO],
                                                       st_index[AVMEDIA_TYPE_VIDEO],
                                                       NULL,
                                                       0);
    // if (!video_disable && !subtitle_disable)
    st_index[AVMEDIA_TYPE_SUBTITLE] = av_find_best_stream(ic,
                                                          AVMEDIA_TYPE_SUBTITLE,
                                                          st_index[AVMEDIA_TYPE_SUBTITLE],
                                                          (st_index[AVMEDIA_TYPE_AUDIO] >= 0
                                                               ? st_index[AVMEDIA_TYPE_AUDIO]
                                                               : st_index[AVMEDIA_TYPE_VIDEO]),
                                                          NULL,
                                                          0);
    /* open the streams */
    if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) {
        stream_component_open(is, st_index[AVMEDIA_TYPE_VIDEO]);
    }
    if (st_index[AVMEDIA_TYPE_AUDIO] >= 0) {
        stream_component_open(is, st_index[AVMEDIA_TYPE_AUDIO]);
    }
    if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0) {
        stream_component_open(is, st_index[AVMEDIA_TYPE_SUBTITLE]);
    }
    // At least one of audio/video must have opened; subtitles alone are useless.
    if (is->video_stream < 0 && is->audio_stream < 0) {
        av_log(nullptr,
               AV_LOG_FATAL,
               "Failed to open file '%s' or configure filtergraph\n",
               is->filename);
        ret = -1;
        goto fail;
    }
    if (infinite_buffer < 0 && is->realtime)
        infinite_buffer = 1; // never limit packet queues for live sources
    return 0;
fail:
    // Only free the context if ownership was not yet transferred to is->ic
    // (otherwise stream_close() will close it).
    if (ic && !is->ic)
        avformat_close_input(&ic);
    return ret;
}
/* Allocate and initialise a VideoState for `filename`: frame/packet queues,
 * clocks, volume, sync mode, loop flag.  No demuxer is opened and no threads
 * are started here (see open_media()/threads_setting()).
 * Returns the new state, or nullptr on failure (partial state is released
 * via stream_close()). */
VideoState* VideoStateData::stream_open(const char* filename, const AVInputFormat* iformat)
{
    VideoState* is = nullptr;
    int startup_volume = 100;                // initial volume in percent
    int av_sync_type = AV_SYNC_AUDIO_MASTER; // sync video/subtitles to the audio clock
    is = (VideoState*) av_mallocz(sizeof(VideoState));
    if (!is)
        return nullptr;
    // -1 marks "no stream selected yet" for every component.
    is->last_video_stream = is->video_stream = -1;
    is->last_audio_stream = is->audio_stream = -1;
    is->last_subtitle_stream = is->subtitle_stream = -1;
    is->filename = av_strdup(filename);
    if (!is->filename)
        goto fail;
    is->iformat = iformat;
    is->ytop = 0;
    is->xleft = 0;
    /* start video display */
    if (frame_queue_init(&is->pictq, &is->videoq, VIDEO_PICTURE_QUEUE_SIZE, 1) < 0)
        goto fail;
    if (frame_queue_init(&is->subpq, &is->subtitleq, SUBPICTURE_QUEUE_SIZE, 0) < 0)
        goto fail;
    if (frame_queue_init(&is->sampq, &is->audioq, SAMPLE_QUEUE_SIZE, 1) < 0)
        goto fail;
    if (packet_queue_init(&is->videoq) < 0 || packet_queue_init(&is->audioq) < 0
        || packet_queue_init(&is->subtitleq) < 0)
        goto fail;
    if (!(is->continue_read_thread = new QWaitCondition())) {
        av_log(nullptr, AV_LOG_FATAL, "new QWaitCondition() failed!\n");
        goto fail;
    }
    // Video/audio clocks follow their packet queue's serial; the external
    // clock follows its own serial.
    init_clock(&is->vidclk, &is->videoq.serial);
    init_clock(&is->audclk, &is->audioq.serial);
    init_clock(&is->extclk, &is->extclk.serial);
    is->audio_clock_serial = -1;
    if (startup_volume < 0)
        av_log(nullptr, AV_LOG_WARNING, "-volume=%d < 0, setting to 0\n", startup_volume);
    if (startup_volume > 100)
        av_log(nullptr, AV_LOG_WARNING, "-volume=%d > 100, setting to 100\n", startup_volume);
    // Clamp to [0,100], then rescale into SDL's 0..SDL_MIX_MAXVOLUME range.
    startup_volume = av_clip(startup_volume, 0, 100);
    startup_volume = av_clip(SDL_MIX_MAXVOLUME * startup_volume / 100, 0, SDL_MIX_MAXVOLUME);
    is->audio_volume = startup_volume;
    is->muted = 0;
    is->av_sync_type = av_sync_type;
    // is->read_tid = SDL_CreateThread(read_thread, "read_thread", is);;
    is->read_thread_exit = -1; // -1: read thread not started yet (checked in stream_close)
    is->loop = int(m_bLoopPlay);
    // is->threads = {nullptr};
    is->threads.read_tid = nullptr;
    is->threads.video_decode_tid = nullptr;
    is->threads.audio_decode_tid = nullptr;
    is->threads.video_play_tid = nullptr;
    is->threads.audio_play_tid = nullptr;
    is->threads.subtitle_decode_tid = nullptr;
#if USE_AVFILTER_AUDIO
    is->audio_speed = 1.0; // normal playback rate for the audio filter chain
#endif
    return is;
fail:
    stream_close(is);
    return nullptr;
}
  283. void VideoStateData::threads_setting(VideoState* is, const Threads& threads)
  284. {
  285. if (!is)
  286. return;
  287. assert(!is->threads.read_tid);
  288. assert(!is->threads.video_decode_tid);
  289. assert(!is->threads.audio_decode_tid);
  290. assert(!is->threads.video_play_tid);
  291. assert(!is->threads.audio_play_tid);
  292. assert(!is->threads.subtitle_decode_tid);
  293. // is->threads = threads;
  294. is->threads.read_tid = threads.read_tid;
  295. is->threads.video_decode_tid = threads.video_decode_tid;
  296. is->threads.audio_decode_tid = threads.audio_decode_tid;
  297. is->threads.video_play_tid = threads.video_play_tid;
  298. is->threads.audio_play_tid = threads.audio_play_tid;
  299. is->threads.subtitle_decode_tid = threads.subtitle_decode_tid;
  300. }
  301. void VideoStateData::threads_exit_wait(VideoState* is)
  302. {
  303. if (!is)
  304. return;
  305. auto try_stop_and_join = [](ThreadBase* t) {
  306. if (t) {
  307. t->stop(); // 通知线程退出
  308. t->join(); // 等待线程结束
  309. // 注意:不要delete,线程对象由PlayerController的unique_ptr管理
  310. }
  311. };
  312. try_stop_and_join(is->threads.video_play_tid);
  313. try_stop_and_join(is->threads.audio_play_tid);
  314. try_stop_and_join(is->threads.video_decode_tid);
  315. try_stop_and_join(is->threads.audio_decode_tid);
  316. try_stop_and_join(is->threads.subtitle_decode_tid);
  317. }
/* Fully tear down a VideoState: stop the read thread, close every opened
 * stream component, destroy the queues and free the struct itself.
 * `is` must be non-null (asserted) and is invalid after this returns. */
void VideoStateData::stream_close(VideoState* is)
{
    assert(is);
    is->abort_request = 1; // makes decode_interrupt_cb() cancel any pending I/O
    // SDL_WaitThread(is->read_tid, NULL);
    {
        // Join the read thread only if it was actually started
        // (read_thread_exit is -1 until then — set in stream_open()).
        if (is->read_thread_exit == 0) {
            if (is->threads.read_tid) {
                is->threads.read_tid->stop();
                av_log(nullptr, AV_LOG_INFO, "read thread wait before!\n");
                is->threads.read_tid->join();
                av_log(nullptr, AV_LOG_INFO, "read thread wait after!\n");
                is->threads.read_tid = nullptr;
            }
        }
    }
    /* close each stream */
    if (is->audio_stream >= 0)
        stream_component_close(is, is->audio_stream);
    if (is->video_stream >= 0)
        stream_component_close(is, is->video_stream);
    if (is->subtitle_stream >= 0)
        stream_component_close(is, is->subtitle_stream);
    //threads_exit_wait(is); // wait for all worker threads to exit
    avformat_close_input(&is->ic);
    packet_queue_destroy(&is->videoq);
    packet_queue_destroy(&is->audioq);
    packet_queue_destroy(&is->subtitleq);
    /* free all pictures */
    frame_queue_destory(&is->pictq);
    frame_queue_destory(&is->sampq);
    frame_queue_destory(&is->subpq);
    // SDL_DestroyCond(is->continue_read_thread);
    if (is->continue_read_thread) {
        delete is->continue_read_thread;
        is->continue_read_thread = nullptr;
    }
    // sws_freeContext() tolerates null contexts, so no guards are needed here.
    sws_freeContext(is->img_convert_ctx);
    sws_freeContext(is->sub_convert_ctx);
    av_free(is->filename);
    // if (is->vis_texture)
    // SDL_DestroyTexture(is->vis_texture);
    // if (is->vid_texture)
    // SDL_DestroyTexture(is->vid_texture);
    // if (is->sub_texture)
    // SDL_DestroyTexture(is->sub_texture);
    av_free(is);
}
  366. static enum AVPixelFormat get_hw_format(AVCodecContext* ctx, const enum AVPixelFormat* pix_fmts)
  367. {
  368. for (const enum AVPixelFormat* p = pix_fmts; *p != -1; p++) {
  369. if (*p == hw_pix_fmt)
  370. return *p;
  371. }
  372. fprintf(stderr, "Failed to get HW surface format, codec_id=%d\n", (int) ctx->codec_id);
  373. return AV_PIX_FMT_NONE;
  374. }
  375. // static int hw_decoder_init(AVCodecContext* ctx, const enum AVHWDeviceType
  376. // type)
  377. //{
  378. // int err = 0;
  379. //
  380. // if ((err = av_hwdevice_ctx_create(&hw_device_ctx, type, nullptr,
  381. //nullptr, 0)) < 0) { fprintf(stderr, "Failed to create specified HW
  382. //device.\n"); return err;
  383. // }
  384. //
  385. // ctx->hw_device_ctx = av_buffer_ref(hw_device_ctx);
  386. //
  387. // return err;
  388. //}
  389. int VideoStateData::hw_decoder_init(AVCodecContext* ctx, const enum AVHWDeviceType type)
  390. {
  391. int err = 0;
  392. if ((err = av_hwdevice_ctx_create(&m_hw_device_ctx, type, nullptr, nullptr, 0)) < 0) {
  393. fprintf(stderr, "Failed to create specified HW device.\n");
  394. return err;
  395. }
  396. ctx->hw_device_ctx = av_buffer_ref(m_hw_device_ctx);
  397. return err;
  398. }
  399. bool VideoStateData::open_hardware(AVCodecContext* avctx, const AVCodec* codec, const char* device)
  400. {
  401. enum AVHWDeviceType type = get_hwdevice(device);
  402. hw_pix_fmt = get_hwdevice_decoder(codec, type);
  403. avctx->get_format = get_hw_format;
  404. if (hw_decoder_init(avctx, type) < 0)
  405. return false;
  406. return true;
  407. }
/// Release the hardware device context; av_buffer_unref() is a safe no-op
/// when it was never created.
void VideoStateData::close_hardware()
{
    av_buffer_unref(&m_hw_device_ctx);
}
/* Open the decoder for stream `stream_index` of is->ic and wire it into the
 * matching VideoState fields (audio/video/subtitle).  For video streams,
 * hardware decoding is attempted first when m_bUseHardware is set.
 * Returns >= 0 on success, a negative AVERROR on most failures.
 * NOTE(review): in the hardware branch `ret` temporarily holds a bool, so a
 * hardware failure reaches `fail:` with ret == 0, which looks like success
 * to a caller; callers in this file currently ignore the return value —
 * confirm before relying on it. */
int VideoStateData::stream_component_open(VideoState* is, int stream_index)
{
    assert(is);
    AVFormatContext* ic = is->ic;
    AVCodecContext* avctx;
    const AVCodec* codec;
    AVDictionary* opts = nullptr;
    // const AVDictionaryEntry* t = nullptr;
    int sample_rate, nb_channels;
    // NOTE(review): copied into below but never av_channel_layout_uninit()ed —
    // potential leak for custom layouts; confirm.
    AVChannelLayout ch_layout = {0};
    // int64_t
    int format;
    int ret = 0;
    int stream_lowres = 0; // low-resolution decoding disabled
    if (stream_index < 0 || ((unsigned int) stream_index) >= ic->nb_streams)
        return -1;
    // Build a fresh codec context from the stream's codec parameters.
    avctx = avcodec_alloc_context3(nullptr);
    if (!avctx)
        return AVERROR(ENOMEM);
    ret = avcodec_parameters_to_context(avctx, ic->streams[stream_index]->codecpar);
    if (ret < 0)
        goto fail;
    avctx->pkt_timebase = ic->streams[stream_index]->time_base;
    codec = avcodec_find_decoder(avctx->codec_id); // may be null — checked below
    switch (avctx->codec_type) {
    case AVMEDIA_TYPE_AUDIO:
        is->last_audio_stream = stream_index;
        break;
    case AVMEDIA_TYPE_SUBTITLE:
        is->last_subtitle_stream = stream_index;
        break;
    case AVMEDIA_TYPE_VIDEO:
        is->last_video_stream = stream_index;
        if (m_bUseHardware) {
            m_bHardwareSuccess = false;
            const char* hardware_device = "dxva2"; // device = <vaapi|vdpau|dxva2|d3d11va>
            ret = open_hardware(avctx, codec, hardware_device); // bool stored in int (see NOTE above)
            if (!ret) {
                qWarning("hardware-accelerated opened failed, device:%s", hardware_device);
                goto fail;
            }
            qInfo("hardware-accelerated opened, device:%s", hardware_device);
            m_bHardwareSuccess = true;
        }
        break;
    }
    if (!codec) {
        av_log(nullptr,
               AV_LOG_WARNING,
               "No decoder could be found for codec %s\n",
               avcodec_get_name(avctx->codec_id));
        ret = AVERROR(EINVAL);
        goto fail;
    }
    avctx->codec_id = codec->id;
    if (stream_lowres > codec->max_lowres) {
        av_log(avctx,
               AV_LOG_WARNING,
               "The maximum value for lowres supported by the decoder is %d\n",
               codec->max_lowres);
        stream_lowres = codec->max_lowres;
    }
    avctx->lowres = stream_lowres;
    // avctx->flags2 |= AV_CODEC_FLAG2_FAST;
    /*opts = filter_codec_opts(codec_opts, avctx->codec_id, ic, ic->streams[stream_index], codec);
    if (!av_dict_get(opts, "threads", NULL, 0))
        av_dict_set(&opts, "threads", "auto", 0);
    if (stream_lowres)
        av_dict_set_int(&opts, "lowres", stream_lowres, 0);*/
    if ((ret = avcodec_open2(avctx, codec, &opts)) < 0) {
        goto fail;
    }
    is->eof = 0;
    // Start delivering this stream's packets again (set to ALL in open_media()).
    ic->streams[stream_index]->discard = AVDISCARD_DEFAULT;
    switch (avctx->codec_type) {
    case AVMEDIA_TYPE_AUDIO:
#if USE_AVFILTER_AUDIO
    {
        AVFilterContext* sink;
        // const char* afilters =
        // "aresample=8000,aformat=sample_fmts=s16:channel_layouts=mono"; //
        // "atempo=2"; const char* afilters = nullptr; const char* afilters =
        // "atempo=2.0";
        // Record the decoder's output format as the filter graph's source.
        is->audio_filter_src.freq = avctx->sample_rate;
        is->audio_filter_src.ch_layout.nb_channels = avctx->ch_layout.nb_channels; // avctx->channels;
        is->audio_filter_src.ch_layout = avctx->ch_layout; // avctx->channel_layout
        is->audio_filter_src.fmt = avctx->sample_fmt;
        if ((ret = configure_audio_filters(is, is->afilters, 0)) < 0)
            goto fail;
        sink = is->out_audio_filter;
        // Report what actually comes out of the filter graph's sink.
        sample_rate = av_buffersink_get_sample_rate(sink);
        nb_channels = av_buffersink_get_channels(sink);
        // channel_layout = av_buffersink_get_channel_layout(sink);
        format = av_buffersink_get_format(sink);
        AVChannelLayout chn_layout;
        av_buffersink_get_ch_layout(sink, &chn_layout);
        qDebug("afilter sink: sample rate:%d, chn:%d, fmt:%d, chn_layout:%d",
               sample_rate,
               nb_channels,
               format,
               chn_layout.u);
    }
#else
        sample_rate = avctx->sample_rate;
        ret = av_channel_layout_copy(&ch_layout, &avctx->ch_layout);
        if (ret < 0)
            goto fail;
#endif
        /* prepare audio output */
        /*if ((ret = audio_open(is, chn_layout, nb_channels, sample_rate,
        &is->audio_tgt)) < 0) goto fail;
        is->audio_src = is->audio_tgt;*/
        is->audio_stream = stream_index;
        is->audio_st = ic->streams[stream_index];
        // Formats that cannot seek reliably: make the decoder drop packets
        // before the stream's declared start pts.
        if ((is->ic->iformat->flags
             & (AVFMT_NOBINSEARCH | AVFMT_NOGENSEARCH | AVFMT_NO_BYTE_SEEK))) {
            is->auddec.start_pts = is->audio_st->start_time;
            is->auddec.start_pts_tb = is->audio_st->time_base;
        }
        m_bHasAudio = true;
        m_avctxAudio = avctx;
        break;
    case AVMEDIA_TYPE_VIDEO:
        is->video_stream = stream_index;
        is->video_st = ic->streams[stream_index];
        // Set the video watermark text.
        set_video_watermark(is, "Watermark");
        m_bHasVideo = true;
        m_avctxVideo = avctx;
        break;
    case AVMEDIA_TYPE_SUBTITLE:
        is->subtitle_stream = stream_index;
        is->subtitle_st = ic->streams[stream_index];
        m_bHasSubtitle = true;
        m_avctxSubtitle = avctx;
        break;
    default:
        break;
    }
    goto out;
fail:
    avcodec_free_context(&avctx);
out:
    av_dict_free(&opts);
    return ret;
}
/* Undo stream_component_open() for one stream: abort and destroy its
 * decoder, re-discard its packets, and clear the VideoState references.
 * Out-of-range indices are silently ignored. */
void VideoStateData::stream_component_close(VideoState* is, int stream_index)
{
    assert(is);
    AVFormatContext* ic = is->ic;
    AVCodecParameters* codecpar;
    if (stream_index < 0 || ((unsigned int) stream_index) >= ic->nb_streams)
        return;
    codecpar = ic->streams[stream_index]->codecpar;
    switch (codecpar->codec_type) {
    case AVMEDIA_TYPE_AUDIO:
        // Wake the decoder out of any frame-queue wait, then release it.
        decoder_abort(&is->auddec, &is->sampq);
        // SDL_CloseAudioDevice(audio_dev);
        decoder_destroy(&is->auddec);
        // swr_free(&is->swr_ctx);
        // av_freep(&is->audio_buf1);
        // is->audio_buf1_size = 0;
        // is->audio_buf = nullptr;
        /*if (is->rdft) {
        av_rdft_end(is->rdft);
        av_freep(&is->rdft_data);
        is->rdft = nullptr;
        is->rdft_bits = 0;
        }*/
        break;
    case AVMEDIA_TYPE_VIDEO:
        decoder_abort(&is->viddec, &is->pictq);
        decoder_destroy(&is->viddec);
        break;
    case AVMEDIA_TYPE_SUBTITLE:
        decoder_abort(&is->subdec, &is->subpq);
        decoder_destroy(&is->subdec);
        break;
    default:
        qDebug("Not handled yet.......code type:%d", codecpar->codec_type);
        break;
    }
    // Stop the demuxer from queuing further packets for this stream.
    ic->streams[stream_index]->discard = AVDISCARD_ALL;
    switch (codecpar->codec_type) {
    case AVMEDIA_TYPE_AUDIO:
        is->audio_st = nullptr;
        is->audio_stream = -1;
        break;
    case AVMEDIA_TYPE_VIDEO:
        is->video_st = nullptr;
        is->video_stream = -1;
        break;
    case AVMEDIA_TYPE_SUBTITLE:
        is->subtitle_st = nullptr;
        is->subtitle_stream = -1;
        break;
    default:
        break;
    }
}
/// True once a video stream has been opened in stream_component_open().
bool VideoStateData::has_video() const
{
    return m_bHasVideo;
}
/// True once an audio stream has been opened in stream_component_open().
bool VideoStateData::has_audio() const
{
    return m_bHasAudio;
}
/// True once a subtitle stream has been opened in stream_component_open().
bool VideoStateData::has_subtitle() const
{
    return m_bHasSubtitle;
}
  624. AVCodecContext* VideoStateData::get_contex(AVMediaType type) const
  625. {
  626. AVCodecContext* pCtx = nullptr;
  627. switch (type) {
  628. case AVMEDIA_TYPE_AUDIO:
  629. pCtx = m_avctxAudio;
  630. break;
  631. case AVMEDIA_TYPE_VIDEO:
  632. pCtx = m_avctxVideo;
  633. break;
  634. case AVMEDIA_TYPE_SUBTITLE:
  635. pCtx = m_avctxSubtitle;
  636. break;
  637. default:
  638. break;
  639. }
  640. return pCtx;
  641. }
  642. enum AVHWDeviceType VideoStateData::get_hwdevice(const char* device) const
  643. {
  644. // device = <vaapi|vdpau|dxva2|d3d11va>
  645. enum AVHWDeviceType type = av_hwdevice_find_type_by_name(device);
  646. if (type == AV_HWDEVICE_TYPE_NONE) {
  647. av_log(nullptr, AV_LOG_WARNING, "Device type %s is not supported.\n", device);
  648. av_log(nullptr, AV_LOG_INFO, "Available device types:");
  649. while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)
  650. av_log(nullptr, AV_LOG_INFO, " %s", av_hwdevice_get_type_name(type));
  651. av_log(nullptr, AV_LOG_INFO, "\n");
  652. return AV_HWDEVICE_TYPE_NONE;
  653. }
  654. return type;
  655. }
  656. enum AVPixelFormat VideoStateData::get_hwdevice_decoder(const AVCodec* decoder,
  657. enum AVHWDeviceType type) const
  658. {
  659. if (!decoder || AV_HWDEVICE_TYPE_NONE == type)
  660. return AV_PIX_FMT_NONE;
  661. for (int i = 0;; i++) {
  662. const AVCodecHWConfig* config = avcodec_get_hw_config(decoder, i);
  663. if (!config) {
  664. av_log(nullptr,
  665. AV_LOG_WARNING,
  666. "Decoder %s does not support device type %s.\n",
  667. decoder->name,
  668. av_hwdevice_get_type_name(type));
  669. return AV_PIX_FMT_NONE;
  670. }
  671. if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX
  672. && config->device_type == type) {
  673. return config->pix_fmt;
  674. }
  675. }
  676. return AV_PIX_FMT_NONE;
  677. }