#include "player_core_v2.h"
#include "../base/media_common.h"
#include "../base/logger.h"
#include "../base/types.h"
#include "../codec/codec_video_decoder.h"
#include "../codec/codec_audio_decoder.h"
#include "../utils/utils_synchronizer_v2.h"
// NOTE(review): the four system includes below lost their <...> contents during
// text extraction. Restored from usage in this file (std::ostringstream,
// std::thread/this_thread, std::chrono, std::isnan) — confirm against the
// original source.
#include <sstream>
#include <thread>
#include <chrono>
#include <cmath>

namespace av {
namespace player {

// Constructs the player core: creates the packet/frame queues, the A/V
// synchronizer, both decoders and the audio output device.
// On any failure the state is set to PlayerState::Error and m_initialized
// remains false; openFile()/play() refuse to run in that case.
PlayerCoreV2::PlayerCoreV2(const SyncConfigV2& syncConfig)
    : m_state(PlayerState::Idle)
    , m_eventCallback(nullptr)
    , m_formatContext(nullptr)
    // , m_openGLVideoRenderer(nullptr) // removed
    , m_volume(1.0)
    , m_playbackSpeed(1.0)
    , m_seekTarget(-1)
    , m_seeking(false)
    , m_baseTime(0)
    , m_lastUpdateTime(0)
    , m_threadsShouldStop(false)
    , m_threadsRunning(false)
    , m_paused(false)
    , m_initialized(false)
    , m_frameCount(0)
    , m_lastFrameCount(0)
    , m_errorCount(0)
    , m_buffering(false)
    , m_bufferHealth(1.0)
{
    Logger::instance().info("PlayerCoreV2 created");
    try {
        // Initialize FFmpeg
        if (!initializeFFmpeg()) {
            Logger::instance().error("Failed to initialize FFmpeg");
            setState(PlayerState::Error);
            return;
        }

        // Create separate video and audio packet queues — high capacity so
        // packets are never dropped by the queue itself.
        m_videoPacketQueue = av::utils::PacketQueueFactory::createHighCapacityQueue(2000); // video packet queue, enlarged
        if (!m_videoPacketQueue) {
            Logger::instance().error("Failed to create video packet queue");
            setState(PlayerState::Error);
            return;
        }
        // Disable the packet-drop policy
        m_videoPacketQueue->setDropPolicy(false, true);

        m_audioPacketQueue = av::utils::PacketQueueFactory::createHighCapacityQueue(2000); // audio packet queue, enlarged
        if (!m_audioPacketQueue) {
            Logger::instance().error("Failed to create audio packet queue");
            setState(PlayerState::Error);
            return;
        }
        // Disable the packet-drop policy
        m_audioPacketQueue->setDropPolicy(false, true);

        m_videoFrameQueue = av::utils::FrameQueueFactory::createHighCapacityQueue(100); // enlarged video frame queue, drops disabled
        if (!m_videoFrameQueue) {
            Logger::instance().error("Failed to create video frame queue");
            setState(PlayerState::Error);
            return;
        }
        // Disable the frame-drop policy
        m_videoFrameQueue->setDropPolicy(false, true);

        m_audioFrameQueue = av::utils::FrameQueueFactory::createHighCapacityQueue(400); // enlarged audio frame queue, drops disabled
        if (!m_audioFrameQueue) {
            Logger::instance().error("Failed to create audio frame queue");
            setState(PlayerState::Error);
            return;
        }
        // Disable the frame-drop policy
        m_audioFrameQueue->setDropPolicy(false, true);

        // Create the improved synchronizer.
        // NOTE(review): make_unique's template argument was lost in extraction;
        // decltype(...)::element_type recovers the pointee type declared in the
        // header. Replace with the concrete type name once confirmed.
        m_synchronizer = std::make_unique<decltype(m_synchronizer)::element_type>(syncConfig);
        if (!m_synchronizer) {
            Logger::instance().error("Failed to create synchronizer");
            setState(PlayerState::Error);
            return;
        }

        // Install the synchronizer callbacks
        m_synchronizer->setSyncErrorCallback([this](double error, const std::string& reason) {
            handleSyncError(error, reason);
        });
        m_synchronizer->setFrameDropCallback([this](av::utils::ClockType type, int64_t pts) {
            std::lock_guard<std::mutex> lock(m_mutex);
            m_stats.droppedFrames++;
            if (m_eventCallback) {
                m_eventCallback->onFrameDropped(m_stats.droppedFrames);
            }
        });

        // Create the decoders
        m_videoDecoder = std::make_unique<decltype(m_videoDecoder)::element_type>();
        if (!m_videoDecoder) {
            Logger::instance().error("Failed to create video decoder");
            setState(PlayerState::Error);
            return;
        }
        m_audioDecoder = std::make_unique<decltype(m_audioDecoder)::element_type>();
        if (!m_audioDecoder) {
            Logger::instance().error("Failed to create audio decoder");
            setState(PlayerState::Error);
            return;
        }

        // Create the audio output device
        m_audioOutput = std::make_unique<decltype(m_audioOutput)::element_type>();
        if (!m_audioOutput) {
            Logger::instance().error("Failed to create audio output");
            setState(PlayerState::Error);
            return;
        }

        // Initialize performance monitoring
        m_lastStatsUpdate = std::chrono::steady_clock::now();
        m_lastCpuMeasure = m_lastStatsUpdate;

        m_initialized = true;
        Logger::instance().info("PlayerCoreV2 initialized successfully");
    } catch (const std::exception& e) {
        Logger::instance().error("Exception during PlayerCoreV2 initialization: " + std::string(e.what()));
        setState(PlayerState::Error);
        m_initialized = false;
    } catch (...) {
        Logger::instance().error("Unknown exception during PlayerCoreV2 initialization");
        setState(PlayerState::Error);
        m_initialized = false;
    }
}

// Stops playback and releases all resources.
PlayerCoreV2::~PlayerCoreV2() {
    Logger::instance().info("PlayerCoreV2 destroying...");
    stop();
    cleanup();
    Logger::instance().info("PlayerCoreV2 destroyed");
}

// Registers the observer that receives state/position/error notifications.
// Not owned; the caller must keep it alive while registered.
void PlayerCoreV2::setEventCallback(PlayerEventCallback* callback) {
    std::lock_guard<std::mutex> lock(m_mutex);
    m_eventCallback = callback;
}

// Opens a media file: probes streams, configures the decoders, and leaves the
// player in the Stopped state ready for play(). Returns a specific ErrorCode
// on each failure path.
ErrorCode PlayerCoreV2::openFile(const std::string& filename) {
    Logger::instance().info("Opening file: " + filename);

    if (!m_initialized) {
        Logger::instance().error("PlayerCoreV2 not initialized");
        return ErrorCode::NOT_INITIALIZED;
    }

    // If something is already playing, stop it first
    if (m_state != PlayerState::Idle) {
        stop();
    }

    setState(PlayerState::Opening);

    // Open the media file
    if (!openMediaFile(filename)) {
        setState(PlayerState::Error);
        notifyError("Failed to open media file: " + filename);
        return ErrorCode::FILE_OPEN_FAILED;
    }

    // Record the media info
    m_mediaInfo.filename = filename;

    // Set up the decoders
    if (m_mediaInfo.hasVideo && !setupVideoDecoder()) {
        Logger::instance().error("Failed to setup video decoder");
        setState(PlayerState::Error);
        return ErrorCode::CODEC_OPEN_FAILED;
    }
    if (m_mediaInfo.hasAudio && !setupAudioDecoder()) {
        Logger::instance().error("Failed to setup audio decoder");
        setState(PlayerState::Error);
        return ErrorCode::CODEC_OPEN_FAILED;
    }

    // Note: synchronizer initialization already happened inside openMediaFile();
    // do not call initialize() again here — it would re-select the master clock
    // and could overwrite the stream info that was just configured.

    setState(PlayerState::Stopped);

    // If there is a video stream, ask the host to initialize a renderer
    if (m_mediaInfo.hasVideo && m_eventCallback) {
        AVStream* videoStream = m_formatContext->streams[m_mediaInfo.videoStreamIndex];
        m_eventCallback->onVideoRendererInitRequired(
            videoStream->codecpar->width,
            videoStream->codecpar->height
        );
        Logger::instance().info("Video renderer initialization requested");
    }

    // Publish the media info
    if (m_eventCallback) {
        m_eventCallback->onMediaInfoChanged(m_mediaInfo);
    }

    Logger::instance().info("File opened successfully: " + filename);
    return ErrorCode::SUCCESS;
}

ErrorCode PlayerCoreV2::play() {
Logger::instance().info("Starting playback"); if (m_state == PlayerState::Playing) { Logger::instance().debug("Already playing"); return ErrorCode::SUCCESS; } if (m_state != PlayerState::Stopped && m_state != PlayerState::Paused) { Logger::instance().error("Invalid state for play: " + std::to_string(static_cast(m_state.load()))); return ErrorCode::INVALID_STATE; } // 启动或恢复同步器 if (m_synchronizer) { ErrorCode syncResult = ErrorCode::SUCCESS; if (m_state == PlayerState::Paused) { // 从暂停状态恢复,调用resume方法 syncResult = m_synchronizer->resume(); } else { // 从停止状态开始,调用start方法 syncResult = m_synchronizer->start(); } if (syncResult != ErrorCode::SUCCESS) { Logger::instance().error("Failed to start/resume synchronizer"); return ErrorCode::SYNC_ERROR; } } // 根据当前状态设置播放开始时间 if (m_state == PlayerState::Stopped) { // 从停止状态开始播放,重置所有时间相关变量 m_baseTime = 0; m_frameCount = 0; m_lastFrameCount = 0; m_playStartTime = std::chrono::steady_clock::now(); // 重置统计信息 std::lock_guard lock(m_mutex); m_stats = PlaybackStats(); m_stats.playbackSpeed = m_playbackSpeed; } else if (m_state == PlayerState::Paused) { // 从暂停状态恢复播放,重新设置播放开始时间 // m_baseTime已经在pause()方法中正确更新,这里只需要重新设置开始时间 m_playStartTime = std::chrono::steady_clock::now(); // 清除暂停标志并唤醒等待的线程,类似ffplay.c中的continue_read_thread机制 { std::lock_guard lock(m_pauseMutex); m_paused = false; } m_pauseCondition.notify_all(); } // 启动音频输出设备 if (m_audioOutput && m_mediaInfo.hasAudio) { if (m_state == PlayerState::Paused) { // 从暂停状态恢复音频 try { m_audioOutput->resume(); } catch (const std::exception& e) { Logger::instance().error("Failed to resume audio output: " + std::string(e.what())); // 尝试重新初始化音频设备 try { m_audioOutput->stop(); std::this_thread::sleep_for(std::chrono::milliseconds(100)); m_audioOutput->start(); } catch (const std::exception& e2) { Logger::instance().error("Failed to restart audio output: " + std::string(e2.what())); Logger::instance().warning("Continuing playback without audio"); } } } else { Logger::instance().info("Starting audio output 
device..."); try { m_audioOutput->start(); // 检查音频设备是否成功启动 if (!m_audioOutput->isPlaying()) { Logger::instance().error("Audio output device failed to start"); // 不要因为音频失败而停止整个播放,继续播放视频 Logger::instance().warning("Continuing playback without audio"); } } catch (const std::exception& e) { Logger::instance().error("Exception starting audio output: " + std::string(e.what())); Logger::instance().warning("Continuing playback without audio"); } } } // 根据状态启动或恢复线程 if (m_state == PlayerState::Stopped) { // 从停止状态开始,需要启动所有线程 // 启动播放线程 if (m_mediaInfo.hasVideo && !startVideoPlayThread()) { Logger::instance().error("Failed to start video play thread"); return ErrorCode::THREAD_ERROR; } if (m_mediaInfo.hasAudio && !startAudioPlayThread()) { Logger::instance().error("Failed to start audio play thread"); return ErrorCode::THREAD_ERROR; } // 启动解码线程 if (!startDecodeThreads()) { Logger::instance().error("Failed to start decode threads"); return ErrorCode::THREAD_ERROR; } // 启动读取线程 if (!startReadThread()) { Logger::instance().error("Failed to start read thread"); return ErrorCode::THREAD_ERROR; } m_threadsRunning = true; } // 从暂停状态恢复时,线程已经在运行,只需要唤醒它们(已在上面完成) setState(PlayerState::Playing); Logger::instance().info("Playback started"); return ErrorCode::SUCCESS; } ErrorCode PlayerCoreV2::pause() { Logger::instance().info("Pausing playback"); if (m_state != PlayerState::Playing) { Logger::instance().debug("Not playing, cannot pause"); return ErrorCode::INVALID_STATE; } // 设置暂停标志,让线程在内部循环中等待,而不是停止线程 // 类似ffplay.c中的paused标志机制 { std::lock_guard lock(m_pauseMutex); Logger::instance().info("Setting m_paused to true"); m_paused = true; m_videoPacketQueue->wakeup(); // 唤醒等待的解码线程 m_audioPacketQueue->wakeup(); // 唤醒等待的解码线程 m_videoFrameQueue->wakeup(); // 唤醒等待的播放线程 m_audioFrameQueue->wakeup(); // 唤醒等待的播放线程 Logger::instance().info("m_paused set to: " + std::to_string(m_paused.load())); } // 立即通知所有等待的线程检查暂停状态 Logger::instance().info("Notifying all threads about pause state change"); 
m_pauseCondition.notify_all(); // 暂停同步器 if (m_synchronizer && m_synchronizer->pause() != ErrorCode::SUCCESS) { Logger::instance().warning("Failed to pause synchronizer"); } // 记录暂停时的播放时间 if (m_playStartTime.time_since_epoch().count() != 0) { auto currentTime = std::chrono::steady_clock::now(); auto elapsed = std::chrono::duration_cast( currentTime - m_playStartTime).count(); m_baseTime += static_cast(elapsed * m_playbackSpeed); m_playStartTime = std::chrono::steady_clock::time_point{}; } // 暂停音频输出 if (m_audioOutput) { m_audioOutput->pause(); } setState(PlayerState::Paused); Logger::instance().info("Playback paused (threads continue running)"); return ErrorCode::SUCCESS; } ErrorCode PlayerCoreV2::stop() { Logger::instance().info("Stopping playback"); if (m_state == PlayerState::Idle || m_state == PlayerState::Stopped) { Logger::instance().debug("Already stopped"); return ErrorCode::SUCCESS; } // 停止同步器 if (m_synchronizer) { m_synchronizer->stop(); } // 停止音频输出 if (m_audioOutput) { m_audioOutput->stop(); } // 通知关闭视频渲染器 if (m_eventCallback) { m_eventCallback->onVideoRendererCloseRequired(); } // 清除暂停标志并唤醒所有等待的线程 { std::lock_guard lock(m_pauseMutex); m_paused = false; } m_pauseCondition.notify_all(); // 停止所有线程 stopAllThreads(); // 重置解码器 resetDecoders(); // 清空队列 if (m_videoPacketQueue) m_videoPacketQueue->clear(); if (m_audioPacketQueue) m_audioPacketQueue->clear(); if (m_videoFrameQueue) m_videoFrameQueue->clear(); if (m_audioFrameQueue) m_audioFrameQueue->clear(); // 重置时间 m_baseTime = 0; m_playStartTime = std::chrono::steady_clock::time_point{}; setState(PlayerState::Stopped); Logger::instance().info("Playback stopped"); return ErrorCode::SUCCESS; } ErrorCode PlayerCoreV2::seek(int64_t timestamp) { Logger::instance().info("Seeking to: " + std::to_string(timestamp)); if (m_state == PlayerState::Idle || m_state == PlayerState::Opening) { Logger::instance().error("Invalid state for seek"); return ErrorCode::INVALID_STATE; } std::unique_lock lock(m_seekMutex); // 
设置seek目标和精确范围参数 m_seekTarget = timestamp; m_seekMinTime = INT64_MIN; // 允许向前跳转的最小时间 m_seekMaxTime = INT64_MAX; // 允许向后跳转的最大时间 m_seekFlags = AVSEEK_FLAG_BACKWARD; // 默认向后跳转到关键帧 m_seeking = true; // 更新基准时间为跳转目标时间 m_baseTime = timestamp; m_playStartTime = std::chrono::steady_clock::now(); // 重置同步器 if (m_synchronizer) { m_synchronizer->reset(); } // 清空队列 flushBuffers(); setState(PlayerState::Seeking); // 通知seek条件 m_seekCondition.notify_all(); Logger::instance().info("Precise seek initiated with avformat_seek_file support"); return ErrorCode::SUCCESS; } ErrorCode PlayerCoreV2::setPlaybackSpeed(double speed) { if (speed <= 0.0 || speed > 4.0) { Logger::instance().error("Invalid playback speed: " + std::to_string(speed)); return ErrorCode::INVALID_PARAMS; } std::lock_guard lock(m_mutex); m_playbackSpeed = speed; // 设置同步器的播放速度 if (m_synchronizer) { m_synchronizer->setPlaybackSpeed(speed); } // 设置音频输出的播放速度 if (m_audioOutput) { m_audioOutput->setPlaybackSpeed(speed); } // 更新统计信息 m_stats.playbackSpeed = speed; Logger::instance().info("Playback speed set to: " + std::to_string(speed)); return ErrorCode::SUCCESS; } MediaInfo PlayerCoreV2::getMediaInfo() const { std::lock_guard lock(m_mutex); return m_mediaInfo; } PlaybackStats PlayerCoreV2::getStats() const { std::lock_guard lock(m_mutex); PlaybackStats stats = m_stats; // 更新当前时间 stats.currentTime = getCurrentTime(); // 更新队列大小 if (m_videoPacketQueue) stats.queuedPackets += m_videoPacketQueue->size(); if (m_audioPacketQueue) stats.queuedPackets += m_audioPacketQueue->size(); if (m_videoFrameQueue) stats.queuedVideoFrames = m_videoFrameQueue->size(); if (m_audioFrameQueue) stats.queuedAudioFrames = m_audioFrameQueue->size(); // 更新同步统计 if (m_synchronizer) { auto syncStats = m_synchronizer->getStats(); stats.syncError = syncStats.audioVideoSyncError; stats.avgSyncError = syncStats.avgSyncError; stats.maxSyncError = syncStats.maxSyncError; stats.droppedFrames = syncStats.droppedFrames; stats.duplicatedFrames = 
syncStats.duplicatedFrames; } return stats; } int64_t PlayerCoreV2::getCurrentTime() const { if (m_state == PlayerState::Idle || m_state == PlayerState::Stopped) { return 0; } // 如果同步器可用,尝试使用主时钟的播放时间 if (m_synchronizer) { // 获取主时钟时间(秒),转换为微秒 double masterClockTime = m_synchronizer->getMasterClock(); // 确保时间值合理(非负且不是NaN) if (masterClockTime >= 0.0 && !std::isnan(masterClockTime)) { return static_cast(masterClockTime * 1000000); } } // 回退到原来的计算方式(兼容性保证) if (m_state == PlayerState::Paused) { return m_baseTime; } if (m_playStartTime.time_since_epoch().count() == 0) { return m_baseTime; } auto currentTime = std::chrono::steady_clock::now(); auto elapsed = std::chrono::duration_cast( currentTime - m_playStartTime).count(); return m_baseTime + static_cast(elapsed * m_playbackSpeed); } double PlayerCoreV2::getPlaybackSpeed() const { return m_playbackSpeed; } void PlayerCoreV2::setVolume(double volume) { volume = std::max(0.0, std::min(1.0, volume)); m_volume = volume; // 同时设置音频输出设备的音量 if (m_audioOutput) { m_audioOutput->setVolume(volume); } Logger::instance().debug("Volume set to: " + std::to_string(volume)); } // 流控制接口实现 void PlayerCoreV2::enableVideoStream(bool enable) { if (m_videoStreamEnabled == enable) { return; // 状态未变,不需要处理 } m_videoStreamEnabled = enable; Logger::instance().info("Video stream " + std::string(enable ? 
"enabled" : "disabled")); // 如果播放器已经打开文件,需要更新同步器的流信息 if (m_formatContext && m_synchronizer) { bool useAudio = m_mediaInfo.hasAudio && m_audioStreamEnabled; bool useVideo = m_mediaInfo.hasVideo && m_videoStreamEnabled; m_synchronizer->setStreamInfo(useAudio, useVideo); // 如果正在播放,需要重新启动播放 if (m_state == PlayerState::Playing || m_state == PlayerState::Paused) { // 停止当前播放 PlayerState oldState = m_state; stop(); // 重新开始播放 openFile(m_mediaInfo.filename); if (oldState == PlayerState::Playing) { play(); } } } } void PlayerCoreV2::enableAudioStream(bool enable) { if (m_audioStreamEnabled == enable) { return; // 状态未变,不需要处理 } m_audioStreamEnabled = enable; Logger::instance().info("Audio stream " + std::string(enable ? "enabled" : "disabled")); // 如果播放器已经打开文件,需要更新同步器的流信息 if (m_formatContext && m_synchronizer) { bool useAudio = m_mediaInfo.hasAudio && m_audioStreamEnabled; bool useVideo = m_mediaInfo.hasVideo && m_videoStreamEnabled; m_synchronizer->setStreamInfo(useAudio, useVideo); // 如果正在播放,需要重新启动播放 if (m_state == PlayerState::Playing) { // 暂停当前播放 pause(); // 重新开始播放 play(); } } } bool PlayerCoreV2::isVideoStreamEnabled() const { return m_videoStreamEnabled; } bool PlayerCoreV2::isAudioStreamEnabled() const { return m_audioStreamEnabled; } void PlayerCoreV2::setSyncConfig(const SyncConfigV2& config) { if (m_synchronizer) { m_synchronizer->setConfig(config); } } SyncConfigV2 PlayerCoreV2::getSyncConfig() const { if (m_synchronizer) { return m_synchronizer->getConfig(); } return SyncConfigV2(); } // void PlayerCoreV2::setOpenGLVideoRenderer(OpenGLVideoWidget* renderer) { // m_openGLVideoRenderer = renderer; // } AVFramePtr PlayerCoreV2::getNextVideoFrame() { if (!m_videoFrameQueue || m_state != PlayerState::Playing) { return nullptr; } return m_videoFrameQueue->pop(); } AVFramePtr PlayerCoreV2::getNextAudioFrame() { if (!m_audioFrameQueue || m_state != PlayerState::Playing) { return nullptr; } return m_audioFrameQueue->pop(); } void PlayerCoreV2::update() { if (!m_initialized) 
{ return; } // 更新同步状态 updateSynchronization(); // 更新统计信息 auto now = std::chrono::steady_clock::now(); if (std::chrono::duration_cast(now - m_lastStatsUpdate).count() > 500) { updateStats(); updatePerformanceStats(); m_lastStatsUpdate = now; // 通知位置变化 notifyPositionChanged(); } // 检查错误恢复 if (m_errorCount > 0) { auto timeSinceError = std::chrono::duration_cast(now - m_lastErrorTime).count(); if (timeSinceError > 5) { // 5秒后重置错误计数 m_errorCount = 0; } } } std::string PlayerCoreV2::getDebugInfo() const { std::ostringstream oss; oss << "PlayerCoreV2 Debug Info:\n"; oss << " State: " << static_cast(m_state.load()) << "\n"; oss << " Initialized: " << (m_initialized ? "Yes" : "No") << "\n"; oss << " Threads Running: " << (m_threadsRunning ? "Yes" : "No") << "\n"; oss << " Current Time: " << getCurrentTime() << " us\n"; oss << " Playback Speed: " << m_playbackSpeed << "x\n"; oss << " Volume: " << m_volume << "\n"; oss << " Error Count: " << m_errorCount << "\n"; if (m_synchronizer) { oss << "\n" << m_synchronizer->getDebugInfo(); } return oss.str(); } void PlayerCoreV2::dumpStats() const { PlaybackStats stats = getStats(); Logger::instance().info("=== PlayerCoreV2 Statistics ==="); Logger::instance().info("Current Time: " + std::to_string(stats.currentTime) + " us"); Logger::instance().info("Total Frames: " + std::to_string(stats.totalFrames)); Logger::instance().info("Dropped Frames: " + std::to_string(stats.droppedFrames)); Logger::instance().info("Duplicated Frames: " + std::to_string(stats.duplicatedFrames)); Logger::instance().info("Sync Error: " + std::to_string(stats.syncError * 1000) + " ms"); Logger::instance().info("Avg Sync Error: " + std::to_string(stats.avgSyncError * 1000) + " ms"); Logger::instance().info("Max Sync Error: " + std::to_string(stats.maxSyncError * 1000) + " ms"); Logger::instance().info("CPU Usage: " + std::to_string(stats.cpuUsage) + "%"); Logger::instance().info("Memory Usage: " + std::to_string(stats.memoryUsage) + " MB"); 
Logger::instance().info("Queued Packets: " + std::to_string(stats.queuedPackets)); Logger::instance().info("Queued Video Frames: " + std::to_string(stats.queuedVideoFrames)); Logger::instance().info("Queued Audio Frames: " + std::to_string(stats.queuedAudioFrames)); Logger::instance().info("==============================="); } bool PlayerCoreV2::openMediaFile(const std::string& filename) { // 关闭之前的文件 if (m_formatContext) { avformat_close_input(&m_formatContext); m_formatContext = nullptr; } // 分配格式上下文 m_formatContext = avformat_alloc_context(); if (!m_formatContext) { Logger::instance().error("Failed to allocate format context"); return false; } // 打开输入文件 if (avformat_open_input(&m_formatContext, filename.c_str(), nullptr, nullptr) < 0) { Logger::instance().error("Failed to open input file: " + filename); avformat_free_context(m_formatContext); m_formatContext = nullptr; return false; } // 查找流信息 if (avformat_find_stream_info(m_formatContext, nullptr) < 0) { Logger::instance().error("Failed to find stream info"); avformat_close_input(&m_formatContext); return false; } // 查找视频和音频流 m_mediaInfo.videoStreamIndex = av_find_best_stream(m_formatContext, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0); m_mediaInfo.audioStreamIndex = av_find_best_stream(m_formatContext, AVMEDIA_TYPE_AUDIO, -1, -1, nullptr, 0); m_mediaInfo.hasVideo = (m_mediaInfo.videoStreamIndex >= 0); m_mediaInfo.hasAudio = (m_mediaInfo.audioStreamIndex >= 0); if (!m_mediaInfo.hasVideo && !m_mediaInfo.hasAudio) { Logger::instance().error("No video or audio streams found"); avformat_close_input(&m_formatContext); return false; } // 获取媒体信息 m_mediaInfo.duration = m_formatContext->duration; m_mediaInfo.bitrate = m_formatContext->bit_rate; if (m_mediaInfo.hasVideo) { AVStream* videoStream = m_formatContext->streams[m_mediaInfo.videoStreamIndex]; m_mediaInfo.width = videoStream->codecpar->width; m_mediaInfo.height = videoStream->codecpar->height; // 计算帧率 if (videoStream->avg_frame_rate.den != 0) { m_mediaInfo.fps = 
av_q2d(videoStream->avg_frame_rate); } else if (videoStream->r_frame_rate.den != 0) { m_mediaInfo.fps = av_q2d(videoStream->r_frame_rate); } else { m_mediaInfo.fps = 25.0; // 默认帧率 } Logger::instance().info("Video stream found: " + std::to_string(m_mediaInfo.width) + "x" + std::to_string(m_mediaInfo.height) + " @ " + std::to_string(m_mediaInfo.fps) + " fps"); } if (m_mediaInfo.hasAudio) { AVStream* audioStream = m_formatContext->streams[m_mediaInfo.audioStreamIndex]; m_mediaInfo.sampleRate = audioStream->codecpar->sample_rate; m_mediaInfo.channels = audioStream->codecpar->ch_layout.nb_channels; Logger::instance().info("Audio stream found: " + std::to_string(m_mediaInfo.sampleRate) + " Hz, " + std::to_string(m_mediaInfo.channels) + " channels"); } // 设置同步器的流信息 if (m_synchronizer) { bool useAudio = m_mediaInfo.hasAudio && m_audioStreamEnabled; bool useVideo = m_mediaInfo.hasVideo && m_videoStreamEnabled; m_synchronizer->setStreamInfo(useAudio, useVideo); Logger::instance().info("Synchronizer stream info set: hasAudio=" + std::to_string(useAudio) + ", hasVideo=" + std::to_string(useVideo)); // 在设置流信息后初始化同步器,确保主时钟选择基于正确的流信息 if (m_synchronizer->initialize() != ErrorCode::SUCCESS) { Logger::instance().error("Failed to initialize synchronizer"); return false; } Logger::instance().info("Synchronizer initialized with correct stream info"); } Logger::instance().info("Media file opened successfully: " + filename); return true; } bool PlayerCoreV2::setupVideoDecoder() { if (!m_mediaInfo.hasVideo || !m_videoDecoder) { return false; } AVStream* videoStream = m_formatContext->streams[m_mediaInfo.videoStreamIndex]; // 查找解码器 const AVCodec* codec = avcodec_find_decoder(videoStream->codecpar->codec_id); if (!codec) { Logger::instance().error("Video codec not found"); return false; } Logger::instance().info("Found video codec: " + std::string(codec->name)); Logger::instance().info("Video stream info: width=" + std::to_string(videoStream->codecpar->width) + ", height=" + 
std::to_string(videoStream->codecpar->height) + ", format=" + std::to_string(videoStream->codecpar->format)); // 创建视频解码器参数 VideoDecoderParams videoParams; videoParams.codecName = codec->name; videoParams.width = videoStream->codecpar->width; videoParams.height = videoStream->codecpar->height; videoParams.pixelFormat = static_cast(videoStream->codecpar->format); videoParams.hardwareAccel = false; // 先禁用硬件加速来排除问题 videoParams.lowLatency = false; Logger::instance().info("Video decoder params: codec=" + videoParams.codecName + ", size=" + std::to_string(videoParams.width) + "x" + std::to_string(videoParams.height) + ", format=" + std::to_string(static_cast(videoParams.pixelFormat))); // 初始化视频解码器 if (m_videoDecoder->initialize(videoParams) != ErrorCode::SUCCESS) { Logger::instance().error("Failed to initialize video decoder"); return false; } // 设置流参数 if (m_videoDecoder->setStreamParameters(videoStream->codecpar) != ErrorCode::SUCCESS) { Logger::instance().error("Failed to set video decoder stream parameters"); return false; } // 打开视频解码器 if (m_videoDecoder->open(videoParams) != ErrorCode::SUCCESS) { Logger::instance().error("Failed to open video decoder"); return false; } Logger::instance().info("Video decoder setup successfully"); return true; } bool PlayerCoreV2::setupAudioDecoder() { if (!m_mediaInfo.hasAudio || !m_audioDecoder) { return false; } AVStream* audioStream = m_formatContext->streams[m_mediaInfo.audioStreamIndex]; // 查找解码器 const AVCodec* codec = avcodec_find_decoder(audioStream->codecpar->codec_id); if (!codec) { Logger::instance().error("Audio codec not found"); return false; } // 分配解码器上下文 AVCodecContext* codecContext = avcodec_alloc_context3(codec); if (!codecContext) { Logger::instance().error("Failed to allocate audio codec context"); return false; } // 复制流参数到解码器上下文 if (avcodec_parameters_to_context(codecContext, audioStream->codecpar) < 0) { Logger::instance().error("Failed to copy audio codec parameters"); avcodec_free_context(&codecContext); return 
false; } // 打开解码器 if (avcodec_open2(codecContext, codec, nullptr) < 0) { Logger::instance().error("Failed to open audio codec"); avcodec_free_context(&codecContext); return false; } // 创建音频解码器参数 AudioDecoderParams audioParams; audioParams.codecName = codec->name; audioParams.sampleRate = codecContext->sample_rate; audioParams.channels = codecContext->ch_layout.nb_channels; audioParams.sampleFormat = codecContext->sample_fmt; audioParams.lowLatency = false; audioParams.enableResampling = true; // 初始化音频解码器 if (m_audioDecoder->initialize(audioParams) != ErrorCode::SUCCESS) { Logger::instance().error("Failed to initialize audio decoder"); avcodec_free_context(&codecContext); return false; } // 打开音频解码器 if (m_audioDecoder->open(audioParams) != ErrorCode::SUCCESS) { Logger::instance().error("Failed to open audio decoder"); avcodec_free_context(&codecContext); return false; } // 初始化音频输出设备 if (m_audioOutput && !m_audioOutput->initialize(codecContext->sample_rate, codecContext->ch_layout.nb_channels, codecContext->sample_fmt)) { Logger::instance().error("Failed to initialize audio output"); avcodec_free_context(&codecContext); return false; } // 释放解码器上下文 avcodec_free_context(&codecContext); Logger::instance().info("Audio decoder setup successfully"); return true; } void PlayerCoreV2::resetDecoders() { if (m_videoDecoder) { m_videoDecoder->reset(); } if (m_audioDecoder) { m_audioDecoder->reset(); } } bool PlayerCoreV2::startReadThread() { try { // 确保之前的线程已经完全停止 if (m_readThread.joinable()) { Logger::instance().warning("Read thread still running, waiting for it to stop..."); m_readThread.join(); } m_readThread = std::thread(&PlayerCoreV2::readThreadFunc, this); Logger::instance().info("Read thread started"); return true; } catch (const std::exception& e) { Logger::instance().error("Failed to start read thread: " + std::string(e.what())); return false; } } bool PlayerCoreV2::startDecodeThreads() { try { if (m_mediaInfo.hasVideo) { // 确保之前的视频解码线程已经完全停止 if 
(m_videoDecodeThread.joinable()) { Logger::instance().warning("Video decode thread still running, waiting for it to stop..."); m_videoDecodeThread.join(); } m_videoDecodeThread = std::thread(&PlayerCoreV2::videoDecodeThreadFunc, this); Logger::instance().info("Video decode thread started"); } if (m_mediaInfo.hasAudio) { // 确保之前的音频解码线程已经完全停止 if (m_audioDecodeThread.joinable()) { Logger::instance().warning("Audio decode thread still running, waiting for it to stop..."); m_audioDecodeThread.join(); } m_audioDecodeThread = std::thread(&PlayerCoreV2::audioDecodeThreadFunc, this); Logger::instance().info("Audio decode thread started"); } return true; } catch (const std::exception& e) { Logger::instance().error("Failed to start decode threads: " + std::string(e.what())); return false; } } bool PlayerCoreV2::startVideoPlayThread() { try { // 确保之前的视频播放线程已经完全停止 if (m_videoPlayThread.joinable()) { Logger::instance().warning("Video play thread still running, waiting for it to stop..."); m_videoPlayThread.join(); } m_videoPlayThread = std::thread(&PlayerCoreV2::videoPlayThreadFunc, this); Logger::instance().info("Video play thread started"); return true; } catch (const std::exception& e) { Logger::instance().error("Failed to start video play thread: " + std::string(e.what())); return false; } } bool PlayerCoreV2::startAudioPlayThread() { try { // 确保之前的音频播放线程已经完全停止 if (m_audioPlayThread.joinable()) { Logger::instance().warning("Audio play thread still running, waiting for it to stop..."); m_audioPlayThread.join(); } m_audioPlayThread = std::thread(&PlayerCoreV2::audioPlayThreadFunc, this); Logger::instance().info("Audio play thread started"); return true; } catch (const std::exception& e) { Logger::instance().error("Failed to start audio play thread: " + std::string(e.what())); return false; } } bool PlayerCoreV2::initializeFFmpeg() { // FFmpeg初始化逻辑 av_log_set_level(AV_LOG_WARNING); return true; } void PlayerCoreV2::cleanup() { stopAllThreads(); if (m_formatContext) { 
avformat_close_input(&m_formatContext);
        m_formatContext = nullptr;
    }
    // if (m_synchronizer) {
    //     m_synchronizer->close();
    // }
}

// Transitions the player state atomically and notifies listeners only on an
// actual change (exchange() returns the previous value).
void PlayerCoreV2::setState(PlayerState newState) {
    PlayerState oldState = m_state.exchange(newState);
    if (oldState != newState) {
        notifyStateChanged(newState);
    }
}

// Forwards a state change to the registered event callback, if any.
void PlayerCoreV2::notifyStateChanged(PlayerState newState) {
    if (m_eventCallback) {
        m_eventCallback->onStateChanged(newState);
    }
}

// Logs an error and forwards it to the event callback, if any.
void PlayerCoreV2::notifyError(const std::string& error) {
    Logger::instance().error(error);
    if (m_eventCallback) {
        m_eventCallback->onErrorOccurred(error);
    }
}

// Reports the current playback position to the event callback, if any.
void PlayerCoreV2::notifyPositionChanged() {
    if (m_eventCallback) {
        m_eventCallback->onPositionChanged(getCurrentTime());
    }
}

// Synchronizer error hook: logs, notifies the callback, and triggers recovery
// when the A/V desync exceeds 200 ms.
void PlayerCoreV2::handleSyncError(double error, const std::string& reason) {
    Logger::instance().warning("Sync error: " + std::to_string(error * 1000) + "ms, reason: " + reason);
    if (m_eventCallback) {
        m_eventCallback->onSyncError(error, reason);
    }
    // If the sync error is too large, attempt recovery
    if (error > 0.2) { // 200ms
        attemptRecovery();
    }
}

// Best-effort recovery from repeated sync errors: counts errors, gives up
// after 5, otherwise drops the decoded-frame backlog so playback can resnap.
// NOTE(review): m_errorCount is never reset on successful playback here —
// confirm it is cleared elsewhere, otherwise errors accumulate forever.
void PlayerCoreV2::attemptRecovery() {
    m_errorCount++;
    m_lastErrorTime = std::chrono::steady_clock::now();
    Logger::instance().warning("Attempting recovery, error count: " + std::to_string(m_errorCount.load()));
    if (m_errorCount > 5) {
        Logger::instance().error("Too many errors, stopping playback");
        handleError("Too many sync errors");
        return;
    }
    // // Reset the synchronizer
    // if (m_synchronizer) {
    //     m_synchronizer->reset();
    // }
    // Drop part of the buffered data (decoded frames only; packet queues are kept)
    if (m_videoFrameQueue) {
        m_videoFrameQueue->clear();
    }
    if (m_audioFrameQueue) {
        m_audioFrameQueue->clear();
    }
}

// Fatal-error path: enters the Error state and reports the message.
void PlayerCoreV2::handleError(const std::string& error) {
    setState(PlayerState::Error);
    notifyError(error);
}

// Copies the synchronizer's statistics into the player-level stats under the
// stats mutex. No-op when the synchronizer is absent or threads are stopped.
void PlayerCoreV2::updateSynchronization() {
    if (!m_synchronizer || !m_threadsRunning) {
        return;
    }
    // The synchronizer updates itself internally
    // Refresh synchronizer statistics
    //m_synchronizer->updateStats();
    // Pull synchronizer stats and mirror them into the player stats
    std::lock_guard lock(m_mutex);
    auto syncStats = m_synchronizer->getStats();
    m_stats.syncError = syncStats.audioVideoSyncError;
    m_stats.avgSyncError = syncStats.avgSyncError;
    m_stats.maxSyncError = syncStats.maxSyncError;
    m_stats.droppedFrames = syncStats.droppedFrames;
    m_stats.duplicatedFrames = syncStats.duplicatedFrames;
}

// Updates frame-count bookkeeping and the bitrate stat (kbps) from the
// demuxer context.
void PlayerCoreV2::updateStats() {
    std::lock_guard lock(m_mutex);
    // Update frame-rate bookkeeping
    int64_t currentFrameCount = m_frameCount;
    // NOTE(review): frameDiff is computed but never used — presumably it was
    // meant to feed an fps statistic; confirm and either use or remove it.
    int64_t frameDiff = currentFrameCount - m_lastFrameCount;
    m_lastFrameCount = currentFrameCount;
    // Compute bitrate and other statistics
    if (m_formatContext) {
        m_stats.bitrate = m_formatContext->bit_rate / 1000.0; // kbps
    }
}

// Refreshes CPU/memory usage stats (currently stubbed to 0.0 below).
void PlayerCoreV2::updatePerformanceStats() {
    std::lock_guard lock(m_mutex);
    m_stats.cpuUsage = calculateCpuUsage();
    m_stats.memoryUsage = calculateMemoryUsage();
}

// Stub: CPU usage measurement not implemented yet.
double PlayerCoreV2::calculateCpuUsage() {
    return 0.0;
}

// Stub: memory usage measurement not implemented yet.
double PlayerCoreV2::calculateMemoryUsage() {
    return 0.0;
}

// Signals all worker threads to stop, wakes any thread blocked on a queue,
// then joins read/decode/play threads in order.
void PlayerCoreV2::stopAllThreads() {
    Logger::instance().info("Stopping all threads...");
    m_threadsShouldStop = true;
    // Wake every waiting queue so blocked threads can observe the stop flag
    flushBuffers();
    // Join the threads.
    // NOTE(review): despite its name, this lambda performs a plain join() with
    // no timeout — it can block indefinitely if a worker never observes
    // m_threadsShouldStop; confirm whether a timed join was intended.
    auto joinWithTimeout = [](std::thread& t, const std::string& name) {
        if (t.joinable()) {
            Logger::instance().info("Waiting for " + name + " to stop...");
            t.join();
            Logger::instance().info(name + " stopped");
        }
    };
    joinWithTimeout(m_readThread, "read thread");
    joinWithTimeout(m_videoDecodeThread, "video decode thread");
    joinWithTimeout(m_audioDecodeThread, "audio decode thread");
    joinWithTimeout(m_videoPlayThread, "video play thread");
    joinWithTimeout(m_audioPlayThread, "audio play thread");
    m_threadsRunning = false;
    Logger::instance().info("All threads stopped successfully");
}

// Clears all four packet/frame queues and wakes any consumer blocked on them
// (used both for seek flushes and for shutdown wake-ups).
void PlayerCoreV2::flushBuffers() {
    if (m_videoPacketQueue) {
        m_videoPacketQueue->clear();
        m_videoPacketQueue->wakeup(); // wake waiting decode thread
    }
    if (m_audioPacketQueue) {
        m_audioPacketQueue->clear();
        m_audioPacketQueue->wakeup(); // wake waiting decode thread
    }
    if (m_videoFrameQueue) {
        m_videoFrameQueue->clear();
        m_videoFrameQueue->wakeup(); // wake waiting play thread
    }
    if (m_audioFrameQueue) {
        m_audioFrameQueue->clear();
        m_audioFrameQueue->wakeup(); // wake waiting play thread
    }
}

// Demux loop: reads packets from the format context and routes them to the
// video/audio packet queues. Also services pause (condvar wait), seek
// (keyframe-first, then precise fallback), back-pressure when queues grow,
// and EOF (sentinel packets with stream_index == -1 pushed to both queues).
void PlayerCoreV2::readThreadFunc() {
    Logger::instance().info("Read thread started");
    AVPacket* packet = av_packet_alloc();
    if (!packet) {
        Logger::instance().error("Failed to allocate packet");
        return;
    }
    while (!m_threadsShouldStop) {
        // Check pause state, similar to the paused check in ffplay.c
        bool pausedState = m_paused.load();
        Logger::instance().debug("Read thread loop - m_paused: " + std::to_string(pausedState));
        if (pausedState) {
            Logger::instance().info("Read thread entering pause wait");
            std::unique_lock lock(m_pauseMutex);
            m_pauseCondition.wait(lock, [this] { return !m_paused || m_threadsShouldStop; });
            if (m_threadsShouldStop) {
                break;
            }
            Logger::instance().info("Read thread exiting pause wait");
            // Pause ended; continue with the next loop iteration
            continue;
        }
        // Check whether a seek was requested
        if (m_seeking) {
            std::unique_lock lock(m_seekMutex);
            // Fetch the seek parameters
            int64_t seekMinTime = m_seekMinTime;
            int64_t seekTarget = m_seekTarget;
            int64_t seekMaxTime = m_seekMaxTime;
            int flags = m_seekFlags;
            Logger::instance().info("Performing precise seek - min: " + std::to_string(seekMinTime) + ", target: " + std::to_string(seekTarget) + ", max: " + std::to_string(seekMaxTime) + ", flags: " + std::to_string(flags));
            // First try to land on the nearest keyframe so video decoding
            // restarts cleanly
            int ret = av_seek_frame(m_formatContext, -1, seekTarget, AVSEEK_FLAG_BACKWARD);
            if (ret < 0) {
                Logger::instance().warning("Video keyframe seek failed, trying precise seek");
                // Keyframe seek failed; fall back to a precise seek
                ret = avformat_seek_file(m_formatContext, -1, seekMinTime, seekTarget, seekMaxTime, flags);
                if (ret < 0) {
                    Logger::instance().error("All seek methods failed");
                } else {
                    Logger::instance().info("Fallback precise seek completed to: " + std::to_string(seekTarget));
                }
            } else {
                Logger::instance().info("Keyframe seek completed successfully to target: " + std::to_string(seekTarget));
            }
            // Flush buffered packets and frames
            flushBuffers();
            // Reset the decoders
            resetDecoders();
            // Reset the clock (byte seeks have no meaningful timestamp)
            if (flags & AVSEEK_FLAG_BYTE) {
                m_synchronizer->setClock(av::utils::ClockType::EXTERNAL, NAN, 0);
            } else {
                m_synchronizer->setClock(av::utils::ClockType::EXTERNAL, seekTarget / (double) AV_TIME_BASE, 0);
            }
            m_seeking = false;
            setState(m_state == PlayerState::Seeking ? PlayerState::Playing : m_state.load());
            lock.unlock();
            m_seekCondition.notify_all();
        }
        // Back-pressure check: warn and slow down instead of dropping packets.
        // NOTE(review): 800 is 40% of the 2000-packet capacity, not the 90%
        // the original comment claimed — confirm the intended threshold.
        bool videoQueueNearFull = m_videoPacketQueue && m_videoPacketQueue->size() > 800;
        bool audioQueueNearFull = m_audioPacketQueue && m_audioPacketQueue->size() > 800;
        if (videoQueueNearFull || audioQueueNearFull) {
            // Queues are filling up; pause briefly so the decode threads can drain them
            Logger::instance().warning("Packet queue is getting large: Video=" + std::to_string(m_videoPacketQueue ? m_videoPacketQueue->size() : 0) + ", Audio=" + std::to_string(m_audioPacketQueue ? m_audioPacketQueue->size() : 0) + ". Slowing down read thread.");
            std::this_thread::sleep_for(std::chrono::milliseconds(50));
            continue;
        }
        // Re-check pause right before reading so we do not block inside the read
        if (m_paused.load()) {
            Logger::instance().debug("Read thread detected pause before av_read_frame");
            continue;
        }
        // Read the next packet
        int ret = av_read_frame(m_formatContext, packet);
        if (ret < 0) {
            if (ret == AVERROR_EOF) {
                Logger::instance().info("End of file reached, sending EOF packets to queues");
                // Push an EOF sentinel to each queue so the decode threads
                // know the file has ended
                if (m_videoPacketQueue && m_mediaInfo.hasVideo) {
                    AVPacket* eofPacket = av_packet_alloc();
                    if (eofPacket) {
                        eofPacket->data = nullptr;
                        eofPacket->size = 0;
                        eofPacket->stream_index = -1; // sentinel marking EOF
                        m_videoPacketQueue->push(eofPacket);
                        Logger::instance().info("EOF packet sent to video queue");
                    }
                }
                if (m_audioPacketQueue && m_mediaInfo.hasAudio) {
                    AVPacket* eofPacket = av_packet_alloc();
                    if (eofPacket) {
                        eofPacket->data = nullptr;
                        eofPacket->size = 0;
                        eofPacket->stream_index = -1; // sentinel marking EOF
                        m_audioPacketQueue->push(eofPacket);
                        Logger::instance().info("EOF packet sent to audio queue");
                    }
                }
                break;
            } else {
                Logger::instance().error("Error reading frame: " + std::to_string(ret));
                std::this_thread::sleep_for(std::chrono::milliseconds(10));
                continue;
            }
        }
        // Route the packet to the matching queue by stream index
        if (packet->stream_index == m_mediaInfo.videoStreamIndex && m_videoStreamEnabled) {
            // Video packet goes to the video queue - pushed regardless of
            // queue fullness so no packet is ever dropped
            if (m_videoPacketQueue) {
                AVPacket* packetCopy = av_packet_alloc();
                if (packetCopy && av_packet_ref(packetCopy, packet) == 0) {
                    m_videoPacketQueue->push(packetCopy);
                    // Log queue size periodically for monitoring
                    if (m_videoPacketQueue->size() % 100 == 0) {
                        Logger::instance().debug("Video packet queue size: " + std::to_string(m_videoPacketQueue->size()));
                    }
                } else {
                    av_packet_free(&packetCopy);
                }
            }
        } else if (packet->stream_index == m_mediaInfo.audioStreamIndex && m_audioStreamEnabled) {
            // Audio packet goes to the audio queue - pushed regardless of
            // queue fullness so no packet is ever dropped
            if (m_audioPacketQueue) {
                AVPacket* packetCopy = av_packet_alloc();
                if (packetCopy && av_packet_ref(packetCopy, packet) == 0) {
                    m_audioPacketQueue->push(packetCopy);
                    // Log queue size periodically for monitoring
                    if (m_audioPacketQueue->size() % 100 == 0) {
                        Logger::instance().debug("Audio packet queue size: " + std::to_string(m_audioPacketQueue->size()));
                    }
                } else {
                    av_packet_free(&packetCopy);
                }
            }
        }
        // NOTE(review): these wakeups are unconditional, unlike every other
        // queue access in this function which is null-checked — confirm the
        // queues can never be null here (they are created in the ctor).
        m_audioPacketQueue->wakeup();
        m_videoPacketQueue->wakeup();
        av_packet_unref(packet);
    }
    av_packet_free(&packet);
    Logger::instance().info("Read thread finished");
}

// Video decode loop: pops packets from the video packet queue, decodes them,
// and pushes decoded frames (original PTS preserved) into the video frame
// queue. Recognizes the EOF sentinel packet (stream_index == -1, null data)
// and forwards an EOF sentinel frame before exiting.
void PlayerCoreV2::videoDecodeThreadFunc() {
    Logger::instance().info("Video decode thread started");
    int packetCount = 0;
    int frameCount = 0;
    while (!m_threadsShouldStop) {
        // Check pause state, similar to the paused check in ffplay.c
        bool pausedState = m_paused.load();
        Logger::instance().debug("Video decode thread loop - m_paused: " + std::to_string(pausedState));
        if (pausedState) {
            Logger::instance().info("Video decode thread entering pause wait");
            std::unique_lock lock(m_pauseMutex);
            m_pauseCondition.wait(lock, [this] { return !m_paused || m_threadsShouldStop; });
            if (m_threadsShouldStop) {
                break;
            }
            Logger::instance().info("Video decode thread exiting pause wait");
            // Pause ended; continue with the next loop iteration
            continue;
        }
        if (!m_videoPacketQueue || !m_videoFrameQueue || !m_videoDecoder) {
            std::this_thread::sleep_for(std::chrono::milliseconds(10));
            continue;
        }
        if (m_seeking) {
            std::this_thread::sleep_for(std::chrono::milliseconds(10));
            continue;
        }
        Logger::instance().info("Video decode thread read Packet111");
        // Pop a packet from the video packet queue
        AVPacket* packet = nullptr;
        while (!m_threadsShouldStop && !packet) {
            // Check pause before pop so we do not block inside it
            if (m_paused.load()) {
                Logger::instance().debug("Video decode thread detected pause before pop");
                break;
            }
            Logger::instance().info("Video decode thread read Packet");
            packet = m_videoPacketQueue->pop(10); // short timeout to keep seek responsive
            // NOTE(review): %d used to format a pointer — should be %p (or log
            // packet != nullptr); as written this is undefined behavior.
            Logger::instance().infof("Video decode thread read Packet %d", packet);
            if (packet) {
                // Check for the EOF sentinel
                if (packet->stream_index == -1 && packet->data == nullptr) {
                    Logger::instance().info("Video decode thread received EOF packet");
                    av_packet_free(&packet);
                    // Forward an EOF sentinel frame to the video frame queue
                    if (m_videoFrameQueue) {
                        AVFramePtr eofFrame = makeAVFrame();
                        if (eofFrame) {
                            eofFrame->data[0] = nullptr;
                            eofFrame->width = 0;
                            eofFrame->height = 0;
                            eofFrame->pts = AV_NOPTS_VALUE;
                            m_videoFrameQueue->push(std::move(eofFrame));
                            Logger::instance().info("EOF frame sent to video frame queue");
                        }
                    }
                    // Video decode thread is done
                    Logger::instance().info("Video decode thread finishing due to EOF");
                    return;
                }
            }
            if (!packet) {
                std::this_thread::sleep_for(std::chrono::milliseconds(5));
            }
        }
        if (!packet) {
            Logger::instance().debug("Video decode thread: no more packets available");
            continue;
        }
        packetCount++;
        Logger::instance().debug("Video decode thread got packet #" + std::to_string(packetCount) + ", size=" + std::to_string(packet->size) + ", pts=" + std::to_string(packet->pts));
        // Decode the packet; packetPtr takes ownership of `packet`
        AVPacketPtr packetPtr(packet);
        std::vector frames;
        ErrorCode decodeResult = m_videoDecoder->decode(packetPtr, frames);
        Logger::instance().debug("Video decode result: " + std::to_string(static_cast(decodeResult)) + ", frames count: " + std::to_string(frames.size()));
        if (decodeResult == ErrorCode::SUCCESS) {
            for (auto& framePtr : frames) {
                if (framePtr && !m_threadsShouldStop) {
                    frameCount++;
                    Logger::instance().debug("Processing video frame #" + std::to_string(frameCount) + ", width=" + std::to_string(framePtr->width) + ", height=" + std::to_string(framePtr->height) + ", pts=" + std::to_string(framePtr->pts));
                    // Keep the original PTS; it is converted at play time
                    if (framePtr->pts != AV_NOPTS_VALUE) {
                        Logger::instance().debug("Frame PTS: " + std::to_string(framePtr->pts));
                    }
                    // Push the frame - no frame is ever dropped here.
                    // Move semantics transfer smart-pointer ownership.
                    m_videoFrameQueue->push(std::move(framePtr));
                    m_frameCount++;
                    Logger::instance().debug("Video frame pushed to queue, queue size: " + std::to_string(m_videoFrameQueue->size()));
                    // Warn (but do not drop) when the queue grows large
                    if (m_videoFrameQueue->size() > 80) {
                        Logger::instance().warning("Video frame queue is getting large: " + std::to_string(m_videoFrameQueue->size()) + " frames. Performance may be affected.");
                    }
                }
            }
        } else {
            Logger::instance().warning("Video decode failed with error: " + std::to_string(static_cast(decodeResult)));
        }
    }
    Logger::instance().info("Video decode thread finished, packets processed: " + std::to_string(packetCount) + ", frames decoded: " + std::to_string(frameCount));
}

// Audio decode loop: mirror of videoDecodeThreadFunc for the audio stream.
// Pops packets, decodes, pushes frames; forwards an EOF sentinel frame
// (null data, nb_samples == 0) when the EOF packet arrives.
void PlayerCoreV2::audioDecodeThreadFunc() {
    Logger::instance().info("Audio decode thread started");
    int packetCount = 0;
    int frameCount = 0;
    while (!m_threadsShouldStop) {
        // Check pause state, similar to the paused check in ffplay.c
        bool pausedState = m_paused.load();
        Logger::instance().debug("Audio decode thread loop - m_paused: " + std::to_string(pausedState));
        if (pausedState) {
            Logger::instance().info("Audio decode thread entering pause wait");
            std::unique_lock lock(m_pauseMutex);
            m_pauseCondition.wait(lock, [this] { return !m_paused || m_threadsShouldStop; });
            if (m_threadsShouldStop) {
                break;
            }
            Logger::instance().info("Audio decode thread exiting pause wait");
            // Pause ended; continue with the next loop iteration
            continue;
        }
        if (!m_audioPacketQueue || !m_audioFrameQueue || !m_audioDecoder) {
            std::this_thread::sleep_for(std::chrono::milliseconds(10));
            continue;
        }
        if (m_seeking) {
            std::this_thread::sleep_for(std::chrono::milliseconds(10));
            continue;
        }
        // Pop a packet from the audio packet queue
        AVPacket* packet = nullptr;
        while (!m_threadsShouldStop && !packet) {
            // Check pause before pop so we do not block inside it
            if (m_paused.load()) {
                Logger::instance().debug("Audio decode thread detected pause before pop");
                break;
            }
            packet = m_audioPacketQueue->pop(10); // short timeout to keep seek responsive
            if (packet) {
                // Check for the EOF sentinel
                if (packet->stream_index == -1 && packet->data == nullptr) {
                    Logger::instance().info("Audio decode thread received EOF packet");
                    av_packet_free(&packet);
                    // Forward an EOF sentinel frame to the audio frame queue
                    if (m_audioFrameQueue) {
                        AVFramePtr eofFrame = makeAVFrame();
                        if (eofFrame) {
                            eofFrame->data[0] = nullptr;
                            eofFrame->nb_samples = 0;
                            eofFrame->pts = AV_NOPTS_VALUE;
                            m_audioFrameQueue->push(std::move(eofFrame));
                            Logger::instance().info("EOF frame sent to audio frame queue");
                        }
                    }
                    // Audio decode thread is done
                    Logger::instance().info("Audio decode thread finishing due to EOF");
                    return;
                }
            }
            if (!packet) {
                std::this_thread::sleep_for(std::chrono::milliseconds(5));
            }
        }
        if (!packet) {
            Logger::instance().debug("Audio decode thread: no more packets available");
            continue;
        }
        packetCount++;
        Logger::instance().debug("Audio decode thread got packet #" + std::to_string(packetCount) + ", size=" + std::to_string(packet->size) + ", pts=" + std::to_string(packet->pts));
        // Decode the packet; packetPtr takes ownership of `packet`
        AVPacketPtr packetPtr(packet);
        std::vector frames;
        ErrorCode decodeResult = m_audioDecoder->decode(packetPtr, frames);
        Logger::instance().debug("Audio decode result: " + std::to_string(static_cast(decodeResult)) + ", frames count: " + std::to_string(frames.size()));
        if (decodeResult == ErrorCode::SUCCESS) {
            for (auto& framePtr : frames) {
                if (framePtr && !m_threadsShouldStop) {
                    frameCount++;
                    Logger::instance().debug("Processing audio frame #" + std::to_string(frameCount) + ", nb_samples=" + std::to_string(framePtr->nb_samples) + ", pts=" + std::to_string(framePtr->pts));
                    // Keep the original PTS; it is converted at play time
                    if (framePtr->pts != AV_NOPTS_VALUE) {
                        Logger::instance().debug("Audio Frame PTS: " + std::to_string(framePtr->pts));
                    }
                    // Push the frame - no frame is ever dropped here.
                    // Move semantics transfer smart-pointer ownership.
                    m_audioFrameQueue->push(std::move(framePtr));
                    Logger::instance().debug("Audio frame pushed to queue, queue size: " + std::to_string(m_audioFrameQueue->size()));
                    // Warn (but do not drop) when the queue grows large
                    if (m_audioFrameQueue->size() > 300) {
                        Logger::instance().warning("Audio frame queue is getting large: " + std::to_string(m_audioFrameQueue->size()) + " frames. Performance may be affected.");
                    }
                }
            }
        } else {
            Logger::instance().warning("Audio decode failed with error: " + std::to_string(static_cast(decodeResult)));
        }
        // `packet` is owned by packetPtr; no manual free needed
    }
    Logger::instance().info("Audio decode thread finished, packets processed: " + std::to_string(packetCount) + ", frames decoded: " + std::to_string(frameCount));
}

// Video presentation loop: pops decoded frames, converts PTS to seconds via
// the stream time_base, feeds the synchronizer, and either displays (after an
// optional delay) or drops each frame per the synchronizer's decision. Keeps
// a cloned copy of the last displayed frame to derive frame duration.
// NOTE(review): unlike audioPlayThreadFunc, this loop never checks for the
// EOF sentinel frame (null data[0], width 0) that videoDecodeThreadFunc
// pushes — the sentinel's AV_NOPTS_VALUE pts would flow into the pts math
// below; confirm whether EOF handling is missing here.
void PlayerCoreV2::videoPlayThreadFunc() {
    Logger::instance().info("Video play thread started");
    // Previous frame, used to compute frame duration
    AVFramePtr lastFrame = nullptr;
    while (!m_threadsShouldStop) {
        // Check pause state, similar to the paused check in ffplay.c
        bool pausedState = m_paused.load();
        Logger::instance().debug("Video play thread loop - m_paused: " + std::to_string(pausedState));
        if (pausedState) {
            Logger::instance().info("Video play thread entering pause wait");
            std::unique_lock lock(m_pauseMutex);
            m_pauseCondition.wait(lock, [this] { return !m_paused || m_threadsShouldStop; });
            if (m_threadsShouldStop) {
                break;
            }
            Logger::instance().info("Video play thread exiting pause wait");
            // Pause ended; continue with the next loop iteration
            continue;
        }
        if (!m_videoFrameQueue || !m_synchronizer) {
            std::this_thread::sleep_for(std::chrono::milliseconds(10));
            continue;
        }
        // Re-check pause right before pop so we do not block inside it
        if (m_paused.load()) {
            Logger::instance().debug("Video play thread detected pause before pop");
            continue;
        }
        // Pop a frame with a very short timeout for fast pause response
        AVFramePtr frame = m_videoFrameQueue->pop(1); // 1 ms timeout
        if (!frame) {
            // Decide whether to keep waiting
            if (m_threadsShouldStop) {
                break;
            }
            std::this_thread::sleep_for(std::chrono::milliseconds(10));
            continue;
        }
        // If a pause arrived while popping, release the frame and loop
        bool pausedAfterPop = m_paused.load();
        Logger::instance().debug("Video play thread after pop - m_paused: " + std::to_string(pausedAfterPop));
        if (pausedAfterPop) {
            Logger::instance().info("Video play thread releasing frame due to pause");
            frame.reset(); // smart pointer releases the frame
            continue;
        }
        Logger::instance().debug("Video play thread got frame, pts=" + std::to_string(frame->pts));
        // `frame` is itself a smart pointer; alias it directly
        AVFramePtr& framePtr = frame;
        // Convert the frame PTS to seconds using the stream time base
        double pts = frame->pts * av_q2d(m_formatContext->streams[m_mediaInfo.videoStreamIndex]->time_base);
        // Update the video clock (only reached when not paused)
        m_synchronizer->setClock(av::utils::ClockType::VIDEO, pts, 0);
        // Compute the duration of the previous frame
        double last_duration = 0.0;
        if (lastFrame && lastFrame->pts != AV_NOPTS_VALUE && frame->pts != AV_NOPTS_VALUE) {
            // Time difference between the two frames
            double last_pts = lastFrame->pts * av_q2d(m_formatContext->streams[m_mediaInfo.videoStreamIndex]->time_base);
            last_duration = pts - last_pts;
            if (last_duration < 0 || last_duration > 10.0) {
                // Implausible duration; fall back to 1/fps
                last_duration = 1.0 / m_mediaInfo.fps;
            }
        } else {
            // No previous frame; fall back to 1/fps
            last_duration = 1.0 / m_mediaInfo.fps;
        }
        // Ask the synchronizer whether to display, delay, or drop this frame
        FrameDecision decision = m_synchronizer->synchronizeVideo(pts, 0, last_duration);
        if (decision.action == FrameAction::DISPLAY) {
            // Sleep for the requested delay before display
            if (decision.delay > 0.0) {
                std::this_thread::sleep_for( std::chrono::microseconds(static_cast(decision.delay * 1000000)));
            }
            // Hand the frame to the renderer via the callback
            if (m_eventCallback) {
                m_eventCallback->onVideoFrameReady(framePtr.get());
                Logger::instance().debug("Video frame ready for display, pts=" + std::to_string(pts) + ", delay=" + std::to_string(decision.delay));
            }
            // Replace the remembered previous frame
            lastFrame.reset(); // smart pointer frees the old frame
            // Clone the current frame (ref-counted copy)
            AVFramePtr clonedFrame = makeAVFrame();
            if (clonedFrame && av_frame_ref(clonedFrame.get(), frame.get()) == 0) {
                lastFrame = std::move(clonedFrame);
            }
        } else if (decision.action == FrameAction::DROP) {
            Logger::instance().debug("Video frame dropped due to sync, pts=" + std::to_string(pts) + ", error=" + std::to_string(decision.syncError));
        }
    }
    // Release the remembered frame - smart pointer cleans up
    lastFrame.reset();
    Logger::instance().info("Video play thread finished");
}

// Audio presentation loop: pops decoded frames, updates the audio clock,
// lets the synchronizer adjust the sample count, and writes the frame to the
// audio output device. Recognizes the EOF sentinel frame and transitions to
// Stopped when playback ends naturally.
void PlayerCoreV2::audioPlayThreadFunc() {
    Logger::instance().info("Audio play thread started");
    int frameCount = 0;
    while (!m_threadsShouldStop) {
        // Check pause state, similar to the paused check in ffplay.c
        bool pausedState = m_paused.load();
        Logger::instance().debug("Audio play thread loop - m_paused: " + std::to_string(pausedState));
        if (pausedState) {
            Logger::instance().info("Audio play thread entering pause wait");
            std::unique_lock lock(m_pauseMutex);
            m_pauseCondition.wait(lock, [this] { return !m_paused || m_threadsShouldStop; });
            if (m_threadsShouldStop) {
                break;
            }
            Logger::instance().info("Audio play thread exiting pause wait");
            // Pause ended; continue with the next loop iteration
            continue;
        }
        if (!m_audioFrameQueue || !m_synchronizer || !m_audioOutput) {
            std::this_thread::sleep_for(std::chrono::milliseconds(10));
            continue;
        }
        // Re-check pause right before pop so we do not block inside it
        if (m_paused.load()) {
            Logger::instance().debug("Audio play thread detected pause before pop");
            continue;
        }
        // Pop a frame with a very short timeout for fast pause response
        AVFramePtr frame = m_audioFrameQueue->pop(1); // 1 ms timeout
        if (!frame) {
            // Decide whether to keep waiting
            if (m_threadsShouldStop) {
                break;
            }
            std::this_thread::sleep_for(std::chrono::milliseconds(10));
            continue;
        }
        // If a pause arrived while popping, release the frame and loop
        bool pausedAfterPop = m_paused.load();
        Logger::instance().debug("Audio play thread after pop - m_paused: " + std::to_string(pausedAfterPop));
        if (pausedAfterPop) {
            Logger::instance().info("Audio play thread releasing frame due to pause");
            frame.reset(); // smart pointer releases the frame
            continue;
        }
        // Check for the EOF sentinel frame
        if (frame->data[0] == nullptr && frame->nb_samples == 0) {
            Logger::instance().info("Audio play thread received EOF frame, playback completed");
            frame.reset(); // smart pointer releases the frame
            // Signal natural end of playback
            setState(PlayerState::Stopped);
            Logger::instance().info("Audio playback finished naturally");
            return;
        }
        frameCount++;
        Logger::instance().debug("Audio play thread got frame #" + std::to_string(frameCount) + ", pts=" + std::to_string(frame->pts) + ", nb_samples=" + std::to_string(frame->nb_samples));
        // `frame` is itself a smart pointer; alias it directly
        AVFramePtr& framePtr = frame;
        // Convert the frame PTS to seconds using the stream time base
        double pts = frame->pts * av_q2d(m_formatContext->streams[m_mediaInfo.audioStreamIndex]->time_base);
        // Update the audio clock (only reached when not paused)
        m_synchronizer->setClock(av::utils::ClockType::AUDIO, pts, 0);
        // Let the synchronizer adjust the sample count for A/V sync.
        // NOTE(review): the cast assumes interleaved 16-bit samples in
        // data[0] — confirm the audio decoder's output sample format.
        int originalSamples = frame->nb_samples;
        int adjustedSamples = m_synchronizer->synchronizeAudio((short*) frame->data[0], frame->nb_samples, pts);
        if (adjustedSamples != originalSamples) {
            Logger::instance().debug("Audio samples adjusted from " + std::to_string(originalSamples) + " to " + std::to_string(adjustedSamples) + " for sync");
        }
        if (m_paused) {
            continue;
        }
        {
            Logger::instance().debug("Writing audio frame to output device");
            bool writeResult = m_audioOutput->writeFrame(framePtr);
            Logger::instance().debug("Audio frame write result: " + std::to_string(writeResult));
            // On write failure, back off briefly to avoid a tight retry loop
            if (!writeResult) {
                Logger::instance().warning("Audio frame write failed, waiting before next frame");
                std::this_thread::sleep_for(std::chrono::milliseconds(10));
            }
        }
        // framePtr frees the frame automatically; no av_frame_free needed
    }
    Logger::instance().info("Audio play thread finished, total frames processed: " + std::to_string(frameCount));
}

} // namespace player
} // namespace av