#include "avopenglwidget.h"
// NOTE(review): the original #include targets were lost in this copy of the
// file; the headers below were reconstructed from the symbols used in this
// translation unit — verify against the repository version.
#include <QOpenGLShaderProgram>
#include <QOpenGLContext>
#include <QPainter>
#include <QFontMetrics>
#include <QTimer>
#include <QMutexLocker>
#include <QDebug>

/**
 * @brief Video display widget.
 *
 * Uploads RGBA frames into a single OpenGL texture and renders them through
 * one fragment shader implementing all optional effects (grayscale,
 * threshold, blur, invert, color-reduce, gamma, contrast/brightness,
 * mirror). While no frame is available, a rotating 3D "no video" tip is
 * drawn instead.
 */
AVOpenGLWidget::AVOpenGLWidget(QWidget* parent)
    : QOpenGLWidget(parent)
    , m_program(nullptr)
    , m_textureId(0)
    , m_frameData(nullptr)
    , m_frameWidth(0)
    , m_frameHeight(0)
    , m_frameFormat(0)
    , m_frameUpdated(false)
    , m_initialized(false)
    , m_keepAspectRatio(true)
    , m_gray(false)
    , m_threshold(false)
    , m_thresholdValue(0.5f)
    , m_blur(false)
    , m_blurRadius(1.0f)
    , m_reverse(false)
    , m_colorReduce(false)
    , m_colorReduceLevel(0)
    , m_gamma(false)
    , m_gammaValue(1.0f)
    , m_contrastBright(false)
    , m_contrast(1.0f)
    , m_brightness(0.0f)
    , m_mirror(false)
    , m_noVideoTip(QStringLiteral("视频未开始"))
    , m_tipTexture(0)
    , m_tipAngle(0.0f)
{
    // FIX: m_texWidth/m_texHeight are read in paintGL() but were never
    // initialized here. Start them at 0 so the first frame always takes the
    // glTexImage2D (storage-defining) path. (Harmless if the header already
    // brace-initializes them.)
    m_texWidth = 0;
    m_texHeight = 0;

    // Give the GL viewport a sensible minimum display area.
    setMinimumSize(320, 240);
    setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Expanding);
    // setAttribute(Qt::WA_NoSystemBackground, true);
    // setAttribute(Qt::WA_OpaquePaintEvent, true);
    // setAutoFillBackground(false);

    // Full-screen quad as a triangle strip: BL, BR, TL, TR.
    m_vertices[0] = -1.0f; m_vertices[1] = -1.0f;
    m_vertices[2] =  1.0f; m_vertices[3] = -1.0f;
    m_vertices[4] = -1.0f; m_vertices[5] =  1.0f;
    m_vertices[6] =  1.0f; m_vertices[7] =  1.0f;

    // Texture coordinates (V flipped so the image is not drawn upside-down).
    m_texCoords[0] = 0.0f; m_texCoords[1] = 1.0f;
    m_texCoords[2] = 1.0f; m_texCoords[3] = 1.0f;
    m_texCoords[4] = 0.0f; m_texCoords[5] = 0.0f;
    m_texCoords[6] = 1.0f; m_texCoords[7] = 0.0f;

    // Timer driving the rotating "no video" tip animation (~33 fps).
    m_tipTimer = new QTimer(this);
    connect(m_tipTimer, &QTimer::timeout, this, [this]() {
        m_tipAngle += 2.0f;
        if (m_tipAngle > 360.0f)
            m_tipAngle -= 360.0f;
        update();
    });
    m_tipTimer->start(30);

    // The tip texture is generated lazily on first paint.
    m_frameData = nullptr;
    m_frameUpdated = false;
    m_tipImage = QImage();
    m_tipTexture = 0;
}

AVOpenGLWidget::~AVOpenGLWidget()
{
    Close();
}

/**
 * @brief Pre-allocates the internal RGBA frame buffer for the given size.
 * @param width  Frame width in pixels.
 * @param height Frame height in pixels.
 * @return true on success, false if the requested size is invalid.
 */
bool AVOpenGLWidget::Open(unsigned int width, unsigned int height)
{
    // Reject degenerate sizes up front — the buffer math below assumes > 0.
    if (width == 0 || height == 0)
        return false;

    QMutexLocker locker(&m_mutex);
    m_frameWidth = width;
    m_frameHeight = height;

    // Drop any previous buffer before reallocating.
    if (m_frameData) {
        delete[] m_frameData;
    }

    // FIX: compute the allocation size in size_t to avoid unsigned-int
    // overflow for very large frames. 4 bytes per pixel (RGBA).
    const size_t dataSize = static_cast<size_t>(width) * height * 4;
    m_frameData = new unsigned char[dataSize];
    memset(m_frameData, 0, dataSize);
    return true;
}

/**
 * @brief Releases GL resources and the CPU-side frame buffer, returning the
 *        widget to its pristine (uninitialized) state.
 */
void AVOpenGLWidget::Close()
{
    // GL objects can only be deleted with the context current; skip the
    // makeCurrent round-trip entirely when there is nothing to delete.
    if (m_textureId || m_program) {
        makeCurrent();
        if (m_textureId) {
            glDeleteTextures(1, &m_textureId);
            m_textureId = 0;
        }
        if (m_program) {
            delete m_program;
            m_program = nullptr;
        }
        doneCurrent();
    }

    // Release the CPU-side frame data under the frame mutex.
    QMutexLocker locker(&m_mutex);
    if (m_frameData) {
        delete[] m_frameData;
        m_frameData = nullptr;
    }
    m_frameWidth = 0;
    m_frameHeight = 0;
    // FIX: also reset the cached texture dimensions. Without this, reopening
    // at the same frame size after Close() made paintGL() take the
    // glTexSubImage2D path against a freshly created texture that has no
    // storage defined yet (showEndTip() already does this reset).
    m_texWidth = 0;
    m_texHeight = 0;
    m_frameUpdated = false;
    m_initialized = false;
}

/**
 * @brief One-time GL setup: compiles the effect shader and creates the
 *        (storage-less) video texture. Called by Qt with the context current.
 */
void AVOpenGLWidget::initializeGL()
{
    initializeOpenGLFunctions();
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);

    // Build the single shader program that implements every effect; each
    // effect is toggled by a uniform so no program switching is needed.
    if (m_program) {
        delete m_program;
        m_program = nullptr;
    }
    m_program = new QOpenGLShaderProgram();
    m_program->addShaderFromSourceCode(QOpenGLShader::Vertex,
        "attribute vec2 vertexIn;\n"
        "attribute vec2 textureIn;\n"
        "varying vec2 textureOut;\n"
        "void main(void)\n"
        "{\n"
        "    gl_Position = vec4(vertexIn, 0.0, 1.0);\n"
        "    textureOut = textureIn;\n"
        "}\n");
    m_program->addShaderFromSourceCode(QOpenGLShader::Fragment, R"Raw(
varying vec2 textureOut;
uniform sampler2D texture;
uniform vec2 uTextureSize;
uniform bool uGray;
uniform bool uThreshold;
uniform float uThresholdValue;
uniform bool uBlur;
uniform float uBlurRadius;
uniform bool uReverse;
uniform bool uColorReduce;
uniform int uColorReduceLevel;
uniform bool uGamma;
uniform float uGammaValue;
uniform bool uContrastBright;
uniform float uContrast;
uniform float uBrightness;
uniform bool uMirror;
void main(void)
{
    vec2 uv = textureOut;
    if (uMirror) {
        uv.x = 1.0 - uv.x;
    }
    vec4 color = texture2D(texture, uv);
    // grayscale
    if (uGray) {
        float gray = dot(color.rgb, vec3(0.299, 0.587, 0.114));
        color = vec4(gray, gray, gray, color.a);
    }
    // binarize
    if (uThreshold) {
        float v = dot(color.rgb, vec3(0.299, 0.587, 0.114));
        float th = v > uThresholdValue ? 1.0 : 0.0;
        color = vec4(th, th, th, color.a);
    }
    // simple 3x3 box blur
    if (uBlur) {
        vec2 tex_offset = vec2(1.0) / uTextureSize;
        vec4 sum = vec4(0.0);
        for (int dx = -1; dx <= 1; ++dx)
            for (int dy = -1; dy <= 1; ++dy)
                sum += texture2D(texture, uv + vec2(dx, dy) * tex_offset * uBlurRadius);
        color = sum / 9.0;
    }
    // invert
    if (uReverse) {
        color.rgb = vec3(1.0) - color.rgb;
    }
    // color reduction (posterize)
    if (uColorReduce) {
        color.rgb = floor(color.rgb * float(uColorReduceLevel)) / float(uColorReduceLevel);
    }
    // gamma correction
    if (uGamma) {
        color.rgb = pow(color.rgb, vec3(1.0 / uGammaValue));
    }
    // contrast / brightness
    if (uContrastBright) {
        color.rgb = color.rgb * uContrast + uBrightness;
    }
    gl_FragColor = color;
}
)Raw");
    m_program->bindAttributeLocation("vertexIn", 0);
    m_program->bindAttributeLocation("textureIn", 1);
    // FIX: report shader link failures instead of silently ignoring them —
    // a failed link previously produced a black widget with no diagnostics.
    if (!m_program->link()) {
        qDebug() << "AVOpenGLWidget shader link failed:" << m_program->log();
    }

    // Create the video texture; storage is defined lazily in paintGL() once
    // the first frame's dimensions are known.
    glGenTextures(1, &m_textureId);
    glBindTexture(GL_TEXTURE_2D, m_textureId);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glBindTexture(GL_TEXTURE_2D, 0);
    m_initialized = true;
}

void AVOpenGLWidget::resizeGL(int width, int height)
{
    glViewport(0, 0, width, height);
}

/**
 * @brief Renders m_noVideoTip into a QImage and uploads it as a GL texture.
 *
 * Safe to call from any code path on the GUI thread: it makes the context
 * current itself if it is not already, and deletes any previous tip texture.
 */
void AVOpenGLWidget::updateTipTexture()
{
    // Ensure the context is current before any GL call.
    bool needMakeCurrent = (QOpenGLContext::currentContext() != context());
    if (needMakeCurrent)
        makeCurrent();

    QFont font;
    font.setPointSize(48);
    font.setBold(true);
    QFontMetrics fm(font);
    int w = fm.horizontalAdvance(m_noVideoTip) + 40; // 20 px padding per side
    int h = fm.height() + 40;

    QImage img(w, h, QImage::Format_ARGB32_Premultiplied);
    img.fill(Qt::transparent);
    QPainter p(&img);
    p.setFont(font);
    p.setPen(Qt::white);
    p.setRenderHint(QPainter::Antialiasing);
    p.drawText(img.rect(), Qt::AlignCenter, m_noVideoTip);
    p.end();
    m_tipImage = img;

    if (m_tipTexture) {
        glDeleteTextures(1, &m_tipTexture);
    }
    glGenTextures(1, &m_tipTexture);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, m_tipTexture);
    // ARGB32_Premultiplied is stored B,G,R,A per byte on little-endian hosts,
    // hence the GL_BGRA upload format. NOTE(review): GL_BGRA is a desktop-GL
    // format (ES needs an extension) — confirm the deployed GL profile.
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, img.width(), img.height(), 0,
                 GL_BGRA, GL_UNSIGNED_BYTE, img.bits());
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glBindTexture(GL_TEXTURE_2D, 0);

    if (needMakeCurrent)
        doneCurrent();
}

/**
 * @brief Draws the tip texture as a quad rotating around the Y axis.
 *
 * NOTE(review): uses the fixed-function pipeline (glMatrixMode/glBegin),
 * which requires a desktop-GL compatibility profile — confirm the surface
 * format requested by the application.
 */
void AVOpenGLWidget::drawNoVideoTip3D()
{
    glClearColor(0, 0, 0, 1);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_TEXTURE_2D);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, m_tipTexture);

    // Aspect-corrected orthographic projection so the text is not stretched.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    float aspect = float(width()) / float(height());
    glOrtho(-aspect, aspect, -1, 1, -10, 10);

    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glTranslatef(0, 0, 0);
    glRotatef(m_tipAngle, 0, 1, 0); // spin around the vertical axis

    // Quad half-extents proportional to the tip image vs widget size.
    float w = float(m_tipImage.width()) / width();
    float h = float(m_tipImage.height()) / height();
    glBegin(GL_QUADS);
    glTexCoord2f(0, 1); glVertex3f(-w, -h, 0);
    glTexCoord2f(1, 1); glVertex3f( w, -h, 0);
    glTexCoord2f(1, 0); glVertex3f( w,  h, 0);
    glTexCoord2f(0, 0); glVertex3f(-w,  h, 0);
    glEnd();

    glBindTexture(GL_TEXTURE_2D, 0);
    glDisable(GL_TEXTURE_2D);
    glDisable(GL_DEPTH_TEST);
}

/**
 * @brief Per-frame paint: uploads the latest RGBA buffer and draws it with
 *        the effect shader, or shows the rotating tip when no frame exists.
 */
void AVOpenGLWidget::paintGL()
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    QMutexLocker locker(&m_mutex);

    // No valid frame yet → show the "no video" tip instead.
    if (!m_frameData || m_frameWidth <= 0 || m_frameHeight <= 0 || !m_frameUpdated) {
        if (m_tipTexture == 0)
            updateTipTexture();
        drawNoVideoTip3D();
        return;
    }

    // Upload the frame into the video texture.
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, m_textureId);

    // Force byte row alignment: our buffer is tightly packed, and some
    // drivers otherwise corrupt rows whose width is not a multiple of 4.
    GLint oldUnpackAlign = 4;
    glGetIntegerv(GL_UNPACK_ALIGNMENT, &oldUnpackAlign);
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

    // Re-define texture storage only when the size changed; otherwise use
    // glTexSubImage2D, which is cheaper and more stable on most drivers.
    if (m_texWidth != m_frameWidth || m_texHeight != m_frameHeight) {
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_frameWidth, m_frameHeight,
                     0, GL_RGBA, GL_UNSIGNED_BYTE, m_frameData);
        m_texWidth = m_frameWidth;
        m_texHeight = m_frameHeight;
    } else {
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, m_frameWidth, m_frameHeight,
                        GL_RGBA, GL_UNSIGNED_BYTE, m_frameData);
    }
    glPixelStorei(GL_UNPACK_ALIGNMENT, oldUnpackAlign);

    // Feed every effect toggle/parameter to the shader.
    m_program->bind();
    m_program->setUniformValue("texture", 0);
    m_program->setUniformValue("uTextureSize", QVector2D(m_frameWidth, m_frameHeight));
    m_program->setUniformValue("uGray", m_gray);
    m_program->setUniformValue("uThreshold", m_threshold);
    m_program->setUniformValue("uThresholdValue", m_thresholdValue);
    m_program->setUniformValue("uBlur", m_blur);
    m_program->setUniformValue("uBlurRadius", m_blurRadius);
    m_program->setUniformValue("uReverse", m_reverse);
    m_program->setUniformValue("uColorReduce", m_colorReduce);
    m_program->setUniformValue("uColorReduceLevel", m_colorReduceLevel);
    m_program->setUniformValue("uGamma", m_gamma);
    m_program->setUniformValue("uGammaValue", m_gammaValue);
    m_program->setUniformValue("uContrastBright", m_contrastBright);
    m_program->setUniformValue("uContrast", m_contrast);
    m_program->setUniformValue("uBrightness", m_brightness);
    m_program->setUniformValue("uMirror", m_mirror);

    m_program->enableAttributeArray(0);
    m_program->enableAttributeArray(1);
    m_program->setAttributeArray(0, m_vertices, 2);
    m_program->setAttributeArray(1, m_texCoords, 2);

    // Letterbox/pillarbox via the viewport when keeping the aspect ratio.
    if (m_keepAspectRatio) {
        QSize widgetSize = size();
        double widgetRatio = double(widgetSize.width()) / widgetSize.height();
        double videoRatio = double(m_frameWidth) / m_frameHeight;
        int x = 0, y = 0, w = widgetSize.width(), h = widgetSize.height();
        if (widgetRatio > videoRatio) {
            w = int(h * videoRatio);
            x = (widgetSize.width() - w) / 2;
        } else {
            h = int(w / videoRatio);
            y = (widgetSize.height() - h) / 2;
        }
        glViewport(x, y, w, h);
    } else {
        glViewport(0, 0, width(), height());
    }

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    m_program->disableAttributeArray(0);
    m_program->disableAttributeArray(1);
    m_program->release();
    glBindTexture(GL_TEXTURE_2D, 0);
}

// (return type of updateFrame — its definition continues on the next line)
void
AVOpenGLWidget::updateFrame(const AVOpenGLWidget::RGBAFrame& frame) { if (!frame.data || frame.width <= 0 || frame.height <= 0) return; QMutexLocker locker(&m_mutex); // 如果尺寸变化,重新分配内存 if (m_frameWidth != frame.width || m_frameHeight != frame.height) { if (m_frameData) { delete[] m_frameData; m_frameData = nullptr; } m_frameWidth = frame.width; m_frameHeight = frame.height; // 检查内存分配大小是否合理 size_t dataSize = static_cast(m_frameWidth) * m_frameHeight * 4; if (dataSize > 0 && dataSize < SIZE_MAX / 4) { try { m_frameData = new unsigned char[dataSize]; // RGBA格式 } catch (const std::bad_alloc&) { m_frameData = nullptr; return; } } else { return; } } // 确保m_frameData已正确分配 if (!m_frameData) { return; } // 复制帧数据 memcpy(m_frameData, frame.data, m_frameWidth * m_frameHeight * 4); m_frameUpdated = true; // 请求重绘 update(); } bool AVOpenGLWidget::convertFromAVFrame(AVFrame* frame) { if (!frame || frame->width <= 0 || frame->height <= 0 || !frame->data[0]) return false; QMutexLocker locker(&m_mutex); // 如果尺寸变化,重新分配内存 if (m_frameWidth != frame->width || m_frameHeight != frame->height) { if (m_frameData) { delete[] m_frameData; m_frameData = nullptr; } m_frameWidth = frame->width; m_frameHeight = frame->height; // 检查内存分配大小是否合理 size_t dataSize = static_cast(m_frameWidth) * m_frameHeight * 4; if (dataSize > 0 && dataSize < SIZE_MAX / 4) { try { m_frameData = new unsigned char[dataSize]; // RGBA格式 } catch (const std::bad_alloc&) { m_frameData = nullptr; return false; } } else { return false; } } // 确保m_frameData已正确分配,即使尺寸没有变化 if (!m_frameData) { // 尺寸没有变化但m_frameData为空,需要重新分配 size_t dataSize = static_cast(m_frameWidth) * m_frameHeight * 4; if (dataSize > 0 && dataSize < SIZE_MAX / 4) { try { m_frameData = new unsigned char[dataSize]; // RGBA格式 } catch (const std::bad_alloc&) { m_frameData = nullptr; return false; } } else { return false; } } // 检查源数据是否有效(防止frame.data[0]为空字符或无效数据) if (!frame->data[0] || frame->linesize[0] <= 0) { qDebug() << "Invalid frame data or linesize"; return 
false; } // 根据不同的像素格式进行转换 switch (frame->format) { case AV_PIX_FMT_RGBA: { // 直接复制RGBA数据 for (int y = 0; y < frame->height; y++) { memcpy(m_frameData + y * m_frameWidth * 4, frame->data[0] + y * frame->linesize[0], frame->width * 4); } } break; case AV_PIX_FMT_RGB24: { // RGB24转RGBA for (int y = 0; y < frame->height; y++) { uint8_t* src = frame->data[0] + y * frame->linesize[0]; uint8_t* dst = m_frameData + y * m_frameWidth * 4; for (int x = 0; x < frame->width; x++) { *dst++ = *src++; // R *dst++ = *src++; // G *dst++ = *src++; // B *dst++ = 255; // A } } } break; case AV_PIX_FMT_BGR0: case AV_PIX_FMT_BGRA: { // BGRA转RGBA for (int y = 0; y < frame->height; y++) { uint8_t* src = frame->data[0] + y * frame->linesize[0]; uint8_t* dst = m_frameData + y * m_frameWidth * 4; for (int x = 0; x < frame->width; x++) { uint8_t b = *src++; uint8_t g = *src++; uint8_t r = *src++; uint8_t a = *src++; *dst++ = r; *dst++ = g; *dst++ = b; *dst++ = a; } } } break; case AV_PIX_FMT_BGR24: { // BGR24转RGBA for (int y = 0; y < frame->height; y++) { uint8_t* src = frame->data[0] + y * frame->linesize[0]; uint8_t* dst = m_frameData + y * m_frameWidth * 4; for (int x = 0; x < frame->width; x++) { uint8_t b = *src++; uint8_t g = *src++; uint8_t r = *src++; *dst++ = r; // R *dst++ = g; // G *dst++ = b; // B *dst++ = 255; // A (设为不透明) } } } break; case AV_PIX_FMT_YUV420P: // 添加对YUV420P格式的支持 { // 检查YUV平面数据是否有效 if (!frame->data[1] || !frame->data[2]) return false; // YUV420P转RGBA for (int y = 0; y < frame->height; y++) { uint8_t* dst = m_frameData + y * m_frameWidth * 4; for (int x = 0; x < frame->width; x++) { int Y = frame->data[0][y * frame->linesize[0] + x]; int U = frame->data[1][(y / 2) * frame->linesize[1] + (x / 2)]; int V = frame->data[2][(y / 2) * frame->linesize[2] + (x / 2)]; // YUV转RGB公式 int C = Y - 16; int D = U - 128; int E = V - 128; int R = (298 * C + 409 * E + 128) >> 8; int G = (298 * C - 100 * D - 208 * E + 128) >> 8; int B = (298 * C + 516 * D + 128) >> 8; // 
限制RGB值在0-255范围内 R = R < 0 ? 0 : (R > 255 ? 255 : R); G = G < 0 ? 0 : (G > 255 ? 255 : G); B = B < 0 ? 0 : (B > 255 ? 255 : B); *dst++ = R; // R *dst++ = G; // G *dst++ = B; // B *dst++ = 255; // A } } } break; case AV_PIX_FMT_NV12: { // 检查NV12平面数据是否有效 if (!frame->data[1]) return false; // NV12转RGBA for (int y = 0; y < frame->height; y++) { uint8_t* dst = m_frameData + y * m_frameWidth * 4; for (int x = 0; x < frame->width; x++) { int Y = frame->data[0][y * frame->linesize[0] + x]; int U = frame->data[1][(y / 2) * frame->linesize[1] + (x / 2) * 2]; int V = frame->data[1][(y / 2) * frame->linesize[1] + (x / 2) * 2 + 1]; // YUV转RGB公式 int C = Y - 16; int D = U - 128; int E = V - 128; int R = (298 * C + 409 * E + 128) >> 8; int G = (298 * C - 100 * D - 208 * E + 128) >> 8; int B = (298 * C + 516 * D + 128) >> 8; // 限制RGB值在0-255范围内 R = R < 0 ? 0 : (R > 255 ? 255 : R); G = G < 0 ? 0 : (G > 255 ? 255 : G); B = B < 0 ? 0 : (B > 255 ? 255 : B); *dst++ = R; // R *dst++ = G; // G *dst++ = B; // B *dst++ = 255; // A } } } break; default: // 对于其他格式,可以考虑使用FFmpeg的sws_scale函数 qDebug() << "Unsupported pixel format:" << frame->format; return false; } m_frameUpdated = true; update(); return true; } bool AVOpenGLWidget::Render(AVFrame* frame) { if (!m_initialized && isValid()) { makeCurrent(); initializeGL(); doneCurrent(); } if (!frame) { update(); // 仅刷新显示 return true; } bool result = convertFromAVFrame(frame); // 释放传入的AVFrame,因为现在使用QueuedConnection异步调用 // 需要在这里释放内存,避免内存泄漏 av_frame_free(&frame); return result; } void AVOpenGLWidget::clearFrame() { QMutexLocker locker(&m_mutex); m_frameUpdated = false; update(); } void AVOpenGLWidget::setGray(bool on) { if (m_gray != on) { m_gray = on; update(); } } void AVOpenGLWidget::setThreshold(bool on, float value) { if (m_threshold != on || m_thresholdValue != value) { m_threshold = on; m_thresholdValue = value; update(); } } void AVOpenGLWidget::setBlur(bool on, float radius) { if (m_blur != on || m_blurRadius != radius) { m_blur = on; 
m_blurRadius = radius; update(); } } void AVOpenGLWidget::setReverse(bool on) { if (m_reverse != on) { m_reverse = on; update(); } } void AVOpenGLWidget::setColorReduce(bool on, int level) { if (m_colorReduce != on || m_colorReduceLevel != level) { m_colorReduce = on; m_colorReduceLevel = level; update(); } } void AVOpenGLWidget::setGamma(bool on, float gamma) { if (m_gamma != on || m_gammaValue != gamma) { m_gamma = on; m_gammaValue = gamma; update(); } } void AVOpenGLWidget::setContrastBright(bool on, float contrast, float brightness) { if (m_contrastBright != on || m_contrast != contrast || m_brightness != brightness) { m_contrastBright = on; m_contrast = contrast; m_brightness = brightness; update(); } } void AVOpenGLWidget::setMirror(bool on) { if (m_mirror != on) { m_mirror = on; update(); } } void AVOpenGLWidget::setNoVideoTip(const QString& tip) { if (m_noVideoTip != tip) { m_noVideoTip = tip; updateTipTexture(); update(); } } void AVOpenGLWidget::showEndTip(const QString& tip) { QMutexLocker locker(&m_mutex); m_noVideoTip = tip; if (m_tipTexture) { glDeleteTextures(1, &m_tipTexture); m_tipTexture = 0; } m_frameUpdated = false; if (m_frameData) { delete[] m_frameData; m_frameData = nullptr; } // 重置纹理尺寸缓存,确保下次帧到来时重新定义纹理 m_texWidth = 0; m_texHeight = 0; updateTipTexture(); update(); } void AVOpenGLWidget::onShowYUV(QSharedPointer frame) { if (!frame || frame->getPixelW() <= 0 || frame->getPixelH() <= 0) { return; } QMutexLocker locker(&m_mutex); uint32_t width = frame->getPixelW(); uint32_t height = frame->getPixelH(); // 如果尺寸变化,重新分配内存 if (m_frameWidth != static_cast(width) || m_frameHeight != static_cast(height)) { if (m_frameData) { delete[] m_frameData; m_frameData = nullptr; } m_frameWidth = static_cast(width); m_frameHeight = static_cast(height); // 检查内存分配大小是否合理 size_t dataSize = static_cast(m_frameWidth) * m_frameHeight * 4; if (dataSize > 0 && dataSize < SIZE_MAX / 4) { try { m_frameData = new unsigned char[dataSize]; // RGBA格式 } catch (const 
std::bad_alloc&) { m_frameData = nullptr; return; } } else { return; } // 尺寸变更,强制重定义纹理 m_texWidth = 0; m_texHeight = 0; } // 确保m_frameData已正确分配 if (!m_frameData) { size_t dataSize = static_cast(m_frameWidth) * m_frameHeight * 4; if (dataSize > 0 && dataSize < SIZE_MAX / 4) { try { m_frameData = new unsigned char[dataSize]; // RGBA格式 } catch (const std::bad_alloc&) { m_frameData = nullptr; return; } } else { return; } } AVPixelFormat fmt = frame->getFormat(); uint8_t* y = frame->getData(0); uint8_t* u = frame->getData(1); uint8_t* v = frame->getData(2); int lsY = frame->getLineSize(0); int lsU = frame->getLineSize(1); int lsV = frame->getLineSize(2); if (!y) return; auto clamp = [](int v) { return v < 0 ? 0 : (v > 255 ? 255 : v); }; switch (fmt) { case AV_PIX_FMT_YUV420P: case AV_PIX_FMT_YUVJ420P: if (!u || !v) return; for (int yy = 0; yy < m_frameHeight; ++yy) { uint8_t* dst = m_frameData + yy * m_frameWidth * 4; const uint8_t* srcY = y + yy * lsY; const uint8_t* srcU = u + (yy / 2) * lsU; const uint8_t* srcV = v + (yy / 2) * lsV; for (int xx = 0; xx < m_frameWidth; ++xx) { int Y = srcY[xx]; int Uc = srcU[xx / 2]; int Vc = srcV[xx / 2]; int C = Y - 16, D = Uc - 128, E = Vc - 128; int R = (298 * C + 409 * E + 128) >> 8; int G = (298 * C - 100 * D - 208 * E + 128) >> 8; int B = (298 * C + 516 * D + 128) >> 8; *dst++ = clamp(R); *dst++ = clamp(G); *dst++ = clamp(B); *dst++ = 255; } } break; case AV_PIX_FMT_YUV422P: case AV_PIX_FMT_YUVJ422P: if (!u || !v) return; for (int yy = 0; yy < m_frameHeight; ++yy) { uint8_t* dst = m_frameData + yy * m_frameWidth * 4; const uint8_t* srcY = y + yy * lsY; const uint8_t* srcU = u + yy * lsU; const uint8_t* srcV = v + yy * lsV; for (int xx = 0; xx < m_frameWidth; ++xx) { int Y = srcY[xx]; int Uc = srcU[xx / 2]; int Vc = srcV[xx / 2]; int C = Y - 16, D = Uc - 128, E = Vc - 128; int R = (298 * C + 409 * E + 128) >> 8; int G = (298 * C - 100 * D - 208 * E + 128) >> 8; int B = (298 * C + 516 * D + 128) >> 8; *dst++ = clamp(R); *dst++ = 
clamp(G); *dst++ = clamp(B); *dst++ = 255; } } break; case AV_PIX_FMT_YUYV422: // Packed: Y0 U Y1 V for (int yy = 0; yy < m_frameHeight; ++yy) { uint8_t* dst = m_frameData + yy * m_frameWidth * 4; const uint8_t* row = y + yy * lsY; for (int xx = 0; xx < m_frameWidth; xx += 2) { int Y0 = row[xx * 2 + 0]; int Uc = row[xx * 2 + 1]; int Y1 = row[xx * 2 + 2]; int Vc = row[xx * 2 + 3]; // pixel 0 { int C = Y0 - 16, D = Uc - 128, E = Vc - 128; int R = (298 * C + 409 * E + 128) >> 8; int G = (298 * C - 100 * D - 208 * E + 128) >> 8; int B = (298 * C + 516 * D + 128) >> 8; *dst++ = clamp(R); *dst++ = clamp(G); *dst++ = clamp(B); *dst++ = 255; } // pixel 1 shares U/V { int C = Y1 - 16, D = Uc - 128, E = Vc - 128; int R = (298 * C + 409 * E + 128) >> 8; int G = (298 * C - 100 * D - 208 * E + 128) >> 8; int B = (298 * C + 516 * D + 128) >> 8; *dst++ = clamp(R); *dst++ = clamp(G); *dst++ = clamp(B); *dst++ = 255; } } } break; case AV_PIX_FMT_UYVY422: // Packed: U Y0 V Y1 for (int yy = 0; yy < m_frameHeight; ++yy) { uint8_t* dst = m_frameData + yy * m_frameWidth * 4; const uint8_t* row = y + yy * lsY; for (int xx = 0; xx < m_frameWidth; xx += 2) { int Uc = row[xx * 2 + 0]; int Y0 = row[xx * 2 + 1]; int Vc = row[xx * 2 + 2]; int Y1 = row[xx * 2 + 3]; // pixel 0 { int C = Y0 - 16, D = Uc - 128, E = Vc - 128; int R = (298 * C + 409 * E + 128) >> 8; int G = (298 * C - 100 * D - 208 * E + 128) >> 8; int B = (298 * C + 516 * D + 128) >> 8; *dst++ = clamp(R); *dst++ = clamp(G); *dst++ = clamp(B); *dst++ = 255; } // pixel 1 shares U/V { int C = Y1 - 16, D = Uc - 128, E = Vc - 128; int R = (298 * C + 409 * E + 128) >> 8; int G = (298 * C - 100 * D - 208 * E + 128) >> 8; int B = (298 * C + 516 * D + 128) >> 8; *dst++ = clamp(R); *dst++ = clamp(G); *dst++ = clamp(B); *dst++ = 255; } } } break; case AV_PIX_FMT_YUV444P: if (!u || !v) return; for (int yy = 0; yy < m_frameHeight; ++yy) { uint8_t* dst = m_frameData + yy * m_frameWidth * 4; const uint8_t* srcY = y + yy * lsY; const uint8_t* srcU 
= u + yy * lsU; const uint8_t* srcV = v + yy * lsV; for (int xx = 0; xx < m_frameWidth; ++xx) { int Y = srcY[xx]; int Uc = srcU[xx]; int Vc = srcV[xx]; int C = Y - 16, D = Uc - 128, E = Vc - 128; int R = (298 * C + 409 * E + 128) >> 8; int G = (298 * C - 100 * D - 208 * E + 128) >> 8; int B = (298 * C + 516 * D + 128) >> 8; *dst++ = clamp(R); *dst++ = clamp(G); *dst++ = clamp(B); *dst++ = 255; } } break; case AV_PIX_FMT_NV12: // Y + UV interleaved if (!u) return; for (int yy = 0; yy < m_frameHeight; ++yy) { uint8_t* dst = m_frameData + yy * m_frameWidth * 4; const uint8_t* srcY = y + yy * lsY; const uint8_t* srcUV = u + (yy / 2) * lsU; for (int xx = 0; xx < m_frameWidth; ++xx) { int Y = srcY[xx]; int Uc = srcUV[(xx / 2) * 2 + 0]; int Vc = srcUV[(xx / 2) * 2 + 1]; int C = Y - 16, D = Uc - 128, E = Vc - 128; int R = (298 * C + 409 * E + 128) >> 8; int G = (298 * C - 100 * D - 208 * E + 128) >> 8; int B = (298 * C + 516 * D + 128) >> 8; *dst++ = clamp(R); *dst++ = clamp(G); *dst++ = clamp(B); *dst++ = 255; } } break; case AV_PIX_FMT_NV21: // Y + VU interleaved if (!u) return; for (int yy = 0; yy < m_frameHeight; ++yy) { uint8_t* dst = m_frameData + yy * m_frameWidth * 4; const uint8_t* srcY = y + yy * lsY; const uint8_t* srcVU = u + (yy / 2) * lsU; for (int xx = 0; xx < m_frameWidth; ++xx) { int Y = srcY[xx]; int Vc = srcVU[(xx / 2) * 2 + 0]; int Uc = srcVU[(xx / 2) * 2 + 1]; int C = Y - 16, D = Uc - 128, E = Vc - 128; int R = (298 * C + 409 * E + 128) >> 8; int G = (298 * C - 100 * D - 208 * E + 128) >> 8; int B = (298 * C + 516 * D + 128) >> 8; *dst++ = clamp(R); *dst++ = clamp(G); *dst++ = clamp(B); *dst++ = 255; } } break; case AV_PIX_FMT_GRAY8: for (int yy = 0; yy < m_frameHeight; ++yy) { uint8_t* dst = m_frameData + yy * m_frameWidth * 4; const uint8_t* srcY = y + yy * lsY; for (int xx = 0; xx < m_frameWidth; ++xx) { int Y = srcY[xx]; *dst++ = Y; *dst++ = Y; *dst++ = Y; *dst++ = 255; } } break; case AV_PIX_FMT_RGB24: for (int yy = 0; yy < m_frameHeight; ++yy) { 
uint8_t* dst = m_frameData + yy * m_frameWidth * 4; const uint8_t* src = y + yy * lsY; for (int xx = 0; xx < m_frameWidth; ++xx) { uint8_t r = src[xx * 3 + 0]; uint8_t g = src[xx * 3 + 1]; uint8_t b = src[xx * 3 + 2]; *dst++ = r; *dst++ = g; *dst++ = b; *dst++ = 255; } } break; case AV_PIX_FMT_RGBA: for (int yy = 0; yy < m_frameHeight; ++yy) { uint8_t* dst = m_frameData + yy * m_frameWidth * 4; const uint8_t* src = y + yy * lsY; memcpy(dst, src, m_frameWidth * 4); } break; default: // 不支持的格式:尝试走现有 convertFromAVFrame 流程(如果可用),否则返回 // 这里无法拿到 AVFrame*,先简单忽略 return; } m_frameUpdated = true; update(); }