Parcourir la source

测试基础功能

zhuizhu il y a 7 mois
Parent
commit
424e0281e0

+ 1 - 0
.gitignore

@@ -20,3 +20,4 @@
 /ci_build
 /.vscode
 /bin
+/libs/AVPlayer/SDL2-2.26.5

+ 1 - 1
LearningSmartClient.pro

@@ -6,7 +6,7 @@ CONFIG += console
 CONFIG += c++17
 LIBS += -lshell32
 QT += opengl
- QT += concurrent
+QT += concurrent
 # You can make your code fail to compile if it uses deprecated APIs.
 # In order to do so, uncomment the following line.
 #DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000    # disables all the APIs deprecated before Qt 6.0.0

+ 32 - 0
libs/AVPlayer/AvPlayer.pri

@@ -0,0 +1,32 @@
+
+INCLUDEPATH += $$PWD/Sonic
+HEADERS += \
+    $$PWD/Sonic/sonic.h \
+    $$PWD/av_clock.h \
+    $$PWD/av_decoder.h \
+    $$PWD/av_player.h \
+    $$PWD/avopenglwidget.h \
+    $$PWD/avplayerwidget.h \
+    $$PWD/ffmpeg_compat.h \
+    $$PWD/threadpool.h \
+    $$PWD/vframe.h
+
+SOURCES += \
+    $$PWD/Sonic/sonic.c \
+    $$PWD/av_decoder.cpp \
+    $$PWD/av_player.cpp \
+    $$PWD/avopenglwidget.cpp \
+    $$PWD/avplayerwidget.cpp \
+    $$PWD/threadpool.cpp
+
+
+win32 {
+    INCLUDEPATH += $$PWD/SDL2-2.26.5/include
+
+    LIBS += -L$$PWD/lib \
+            -L$$PWD/SDL2-2.26.5/lib/x64 \
+            -lSDL2 \
+            -lSDL2test \
+            -lDwmapi \
+            -luser32
+}

+ 1354 - 0
libs/AVPlayer/Sonic/sonic.c

@@ -0,0 +1,1354 @@
+/* Sonic library
+   Copyright 2010
+   Bill Cox
+   This file is part of the Sonic Library.
+   This file is licensed under the Apache 2.0 license, and also placed into the public domain.
+   Use it either way, at your option.
+*/
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <stdarg.h>
+#include <limits.h>
+#include <math.h>
+#include "sonic.h"
+//#include "webrtc/base/logging.h"
+#ifndef M_PI
+#define M_PI 3.14159265358979323846
+#endif
+
+/*
+    The following code was used to generate the following sinc lookup table.
+    #include <math.h>
+    #include <limits.h>
+    #include <stdio.h>
+
+    double findHannWeight(int N, double x) {
+        return 0.5*(1.0 - cos(2*M_PI*x/N));
+    }
+
+    double findSincCoefficient(int N, double x) {
+        double hannWindowWeight = findHannWeight(N, x);
+        double sincWeight;
+
+        x -= N/2.0;
+        if (x > 1e-9 || x < -1e-9) {
+            sincWeight = sin(M_PI*x)/(M_PI*x);
+        } else {
+            sincWeight = 1.0;
+        }
+        return hannWindowWeight*sincWeight;
+    }
+
+    int main() {
+        double x;
+        int i;
+        int N = 12;
+
+        for (i = 0, x = 0.0; x <= N; x += 0.02, i++) {
+            printf("%u %d\n", i, (int)(SHRT_MAX*findSincCoefficient(N, x)));
+        }
+        return 0;
+    }
+*/
+
+/* The number of points to use in the sinc FIR filter for resampling. */
+#define SINC_FILTER_POINTS 12 /* I am not able to hear improvement with higher N. */
+#define SINC_TABLE_SIZE 601
+
+/* Lookup table for windowed sinc function of SINC_FILTER_POINTS points. */
+static short sincTable[SINC_TABLE_SIZE] = {
+    0, 0, 0, 0, 0, 0, 0, -1, -1, -2, -2, -3, -4, -6, -7, -9, -10, -12, -14,
+    -17, -19, -21, -24, -26, -29, -32, -34, -37, -40, -42, -44, -47, -48, -50,
+    -51, -52, -53, -53, -53, -52, -50, -48, -46, -43, -39, -34, -29, -22, -16,
+    -8, 0, 9, 19, 29, 41, 53, 65, 79, 92, 107, 121, 137, 152, 168, 184, 200,
+    215, 231, 247, 262, 276, 291, 304, 317, 328, 339, 348, 357, 363, 369, 372,
+    374, 375, 373, 369, 363, 355, 345, 332, 318, 300, 281, 259, 234, 208, 178,
+    147, 113, 77, 39, 0, -41, -85, -130, -177, -225, -274, -324, -375, -426,
+    -478, -530, -581, -632, -682, -731, -779, -825, -870, -912, -951, -989,
+    -1023, -1053, -1080, -1104, -1123, -1138, -1149, -1154, -1155, -1151,
+    -1141, -1125, -1105, -1078, -1046, -1007, -963, -913, -857, -796, -728,
+    -655, -576, -492, -403, -309, -210, -107, 0, 111, 225, 342, 462, 584, 708,
+    833, 958, 1084, 1209, 1333, 1455, 1575, 1693, 1807, 1916, 2022, 2122, 2216,
+    2304, 2384, 2457, 2522, 2579, 2625, 2663, 2689, 2706, 2711, 2705, 2687,
+    2657, 2614, 2559, 2491, 2411, 2317, 2211, 2092, 1960, 1815, 1658, 1489,
+    1308, 1115, 912, 698, 474, 241, 0, -249, -506, -769, -1037, -1310, -1586,
+    -1864, -2144, -2424, -2703, -2980, -3254, -3523, -3787, -4043, -4291,
+    -4529, -4757, -4972, -5174, -5360, -5531, -5685, -5819, -5935, -6029,
+    -6101, -6150, -6175, -6175, -6149, -6096, -6015, -5905, -5767, -5599,
+    -5401, -5172, -4912, -4621, -4298, -3944, -3558, -3141, -2693, -2214,
+    -1705, -1166, -597, 0, 625, 1277, 1955, 2658, 3386, 4135, 4906, 5697, 6506,
+    7332, 8173, 9027, 9893, 10769, 11654, 12544, 13439, 14335, 15232, 16128,
+    17019, 17904, 18782, 19649, 20504, 21345, 22170, 22977, 23763, 24527,
+    25268, 25982, 26669, 27327, 27953, 28547, 29107, 29632, 30119, 30569,
+    30979, 31349, 31678, 31964, 32208, 32408, 32565, 32677, 32744, 32767,
+    32744, 32677, 32565, 32408, 32208, 31964, 31678, 31349, 30979, 30569,
+    30119, 29632, 29107, 28547, 27953, 27327, 26669, 25982, 25268, 24527,
+    23763, 22977, 22170, 21345, 20504, 19649, 18782, 17904, 17019, 16128,
+    15232, 14335, 13439, 12544, 11654, 10769, 9893, 9027, 8173, 7332, 6506,
+    5697, 4906, 4135, 3386, 2658, 1955, 1277, 625, 0, -597, -1166, -1705,
+    -2214, -2693, -3141, -3558, -3944, -4298, -4621, -4912, -5172, -5401,
+    -5599, -5767, -5905, -6015, -6096, -6149, -6175, -6175, -6150, -6101,
+    -6029, -5935, -5819, -5685, -5531, -5360, -5174, -4972, -4757, -4529,
+    -4291, -4043, -3787, -3523, -3254, -2980, -2703, -2424, -2144, -1864,
+    -1586, -1310, -1037, -769, -506, -249, 0, 241, 474, 698, 912, 1115, 1308,
+    1489, 1658, 1815, 1960, 2092, 2211, 2317, 2411, 2491, 2559, 2614, 2657,
+    2687, 2705, 2711, 2706, 2689, 2663, 2625, 2579, 2522, 2457, 2384, 2304,
+    2216, 2122, 2022, 1916, 1807, 1693, 1575, 1455, 1333, 1209, 1084, 958, 833,
+    708, 584, 462, 342, 225, 111, 0, -107, -210, -309, -403, -492, -576, -655,
+    -728, -796, -857, -913, -963, -1007, -1046, -1078, -1105, -1125, -1141,
+    -1151, -1155, -1154, -1149, -1138, -1123, -1104, -1080, -1053, -1023, -989,
+    -951, -912, -870, -825, -779, -731, -682, -632, -581, -530, -478, -426,
+    -375, -324, -274, -225, -177, -130, -85, -41, 0, 39, 77, 113, 147, 178,
+    208, 234, 259, 281, 300, 318, 332, 345, 355, 363, 369, 373, 375, 374, 372,
+    369, 363, 357, 348, 339, 328, 317, 304, 291, 276, 262, 247, 231, 215, 200,
+    184, 168, 152, 137, 121, 107, 92, 79, 65, 53, 41, 29, 19, 9, 0, -8, -16,
+    -22, -29, -34, -39, -43, -46, -48, -50, -52, -53, -53, -53, -52, -51, -50,
+    -48, -47, -44, -42, -40, -37, -34, -32, -29, -26, -24, -21, -19, -17, -14,
+    -12, -10, -9, -7, -6, -4, -3, -2, -2, -1, -1, 0, 0, 0, 0, 0, 0, 0
+};
+
/* All state for one sonic stream.  Samples are interleaved shorts with
   numChannels channels per frame. */
struct sonicStreamStruct {
    short *inputBuffer;        /* pending input samples, not yet processed */
    short *outputBuffer;       /* processed samples awaiting a read call */
    short *pitchBuffer;        /* staging buffer used by adjustPitch() */
    short *downSampleBuffer;   /* mono, down-sampled data for pitch detection */
    float speed;               /* playback speed factor, 1.0 = normal */
    float volume;              /* output scaling factor, 1.0 = unchanged */
    float pitch;               /* pitch factor, 1.0 = unchanged */
    float rate;                /* resampling rate factor, 1.0 = unchanged */
    int oldRatePosition;       /* input-side position counter for rate change */
    int newRatePosition;       /* output-side position counter for rate change */
    int useChordPitch;         /* nonzero: vocal-chord style pitch change */
    int quality;               /* nonzero disables the down-sampling speedup */
    int numChannels;           /* interleaved channels per frame */
    int inputBufferSize;       /* capacity of inputBuffer, in frames */
    int pitchBufferSize;       /* capacity of pitchBuffer, in frames */
    int outputBufferSize;      /* capacity of outputBuffer, in frames */
    int numInputSamples;       /* frames currently held in inputBuffer */
    int numOutputSamples;      /* frames currently held in outputBuffer */
    int numPitchSamples;       /* frames currently held in pitchBuffer */
    int minPeriod;             /* sampleRate/SONIC_MAX_PITCH, shortest pitch period */
    int maxPeriod;             /* sampleRate/SONIC_MIN_PITCH, longest pitch period */
    int maxRequired;           /* 2*maxPeriod: frames needed per processing step */
    int remainingInputToCopy;  /* frames still to pass through unmodified */
    int sampleRate;            /* sampling rate in Hz */
    int prevPeriod;            /* pitch period found in the previous step */
    int prevMinDiff;           /* AMDF min difference of the previous step */
    float avePower;            /* running power average; initialized to 50.0 — usage not visible in this chunk */
};
+
/* Scale samples in place by `volume`, clipping symmetrically to +/-32767.
   Uses 12-bit fixed-point arithmetic to avoid a float multiply per sample. */
static void scaleSamples(
    short *samples,
    int numSamples,
    float volume)
{
    int fixedPointVolume = volume*4096.0f;
    int i;

    for(i = 0; i < numSamples; i++) {
        int scaled = (samples[i]*fixedPointVolume) >> 12;
        if(scaled > 32767) {
            scaled = 32767;
        } else if(scaled < -32767) {
            scaled = -32767;
        }
        samples[i] = scaled;
    }
}
+
/* Get the speed of the stream (1.0 = normal). */
float sonicGetSpeed(
    sonicStream stream)
{
    return stream->speed;
}
+
/* Set the speed of the stream (1.0 = normal; takes effect on the next write). */
void sonicSetSpeed(
    sonicStream stream,
    float speed)
{
    stream->speed = speed;
}
+
/* Get the pitch factor of the stream (1.0 = unchanged). */
float sonicGetPitch(
    sonicStream stream)
{
    return stream->pitch;
}
+
/* Set the pitch factor of the stream (1.0 = unchanged). */
void sonicSetPitch(
    sonicStream stream,
    float pitch)
{
    stream->pitch = pitch;
}
+
/* Get the playback rate of the stream (1.0 = unchanged). */
float sonicGetRate(
    sonicStream stream)
{
    return stream->rate;
}
+
/* Set the playback rate of the stream.  This scales pitch and speed at the
   same time, and restarts the resampler position counters. */
void sonicSetRate(
    sonicStream stream,
    float rate)
{
    stream->rate = rate;

    /* Restart the interpolation positions so the new rate starts cleanly. */
    stream->oldRatePosition = 0;
    stream->newRatePosition = 0;
}
+
/* Get the vocal chord pitch setting (nonzero = enabled). */
int sonicGetChordPitch(
    sonicStream stream)
{
    return stream->useChordPitch;
}
+
/* Set the vocal chord mode for pitch computation.  Default is off (0). */
void sonicSetChordPitch(
    sonicStream stream,
    int useChordPitch)
{
    stream->useChordPitch = useChordPitch;
}
+
/* Get the quality setting (0 = fast mode with down-sampled pitch detection). */
int sonicGetQuality(
    sonicStream stream)
{
    return stream->quality;
}
+
+/* Set the "quality".  Default 0 is virtually as good as 1, but very much faster. */
+void sonicSetQuality(
+    sonicStream stream,
+    int quality)
+{
+    stream->quality = quality;
+}
+
/* Get the volume scaling factor of the stream (1.0 = unchanged). */
float sonicGetVolume(
    sonicStream stream)
{
    return stream->volume;
}
+
/* Set the volume scaling factor of the stream; applied when samples are read out. */
void sonicSetVolume(
    sonicStream stream,
    float volume)
{
    stream->volume = volume;
}
+
+/* Free stream buffers. */
+// 释放流内的缓冲区
+static void freeStreamBuffers(
+    sonicStream stream)
+{
+    if(stream->inputBuffer != NULL) {
+        free(stream->inputBuffer);
+    }
+    if(stream->outputBuffer != NULL) {
+        free(stream->outputBuffer);
+    }
+    if(stream->pitchBuffer != NULL) {
+        free(stream->pitchBuffer);
+    }
+    if(stream->downSampleBuffer != NULL) {
+        free(stream->downSampleBuffer);
+    }
+}
+
+/* Destroy the sonic stream. */
+// 销毁流
+void sonicDestroyStream(
+    sonicStream stream)
+{
+    freeStreamBuffers(stream);
+    free(stream);
+}
+
+/* Allocate stream buffers. */
+/**
+ * 开辟流的数据缓存空间
+ * stream 流
+ * sampleRate 采样率
+ * numChnnels 声道数
+ */
+static int allocateStreamBuffers(
+    sonicStream stream,
+    int sampleRate,
+    int numChannels)
+{   // 最小的pitch周期 44100/400 = 110
+    int minPeriod = sampleRate/SONIC_MAX_PITCH;
+    // 最大的pitch周期 44100/65 = 678 个采样点
+    int maxPeriod = sampleRate/SONIC_MIN_PITCH;
+    // 最大 1356
+    int maxRequired = 2*maxPeriod;
+    // 输入缓冲区的大小 = maxRequired
+    stream->inputBufferSize = maxRequired;
+    // 为inputBuffer开辟空间并初始化为0
+    stream->inputBuffer = (short *)calloc(maxRequired, sizeof(short)*numChannels);
+    // 如果开辟失败返回0
+    if(stream->inputBuffer == NULL) {
+        sonicDestroyStream(stream);
+        return 0;
+    }
+    // 输出缓冲区的大小= maxRequired
+    stream->outputBufferSize = maxRequired;
+    // 为oututBUffer开辟空间
+    stream->outputBuffer = (short *)calloc(maxRequired, sizeof(short)*numChannels);
+    if(stream->outputBuffer == NULL) {
+        sonicDestroyStream(stream);
+        return 0;
+    }
+    // 为pitchBuffer开辟空间
+    stream->pitchBufferSize = maxRequired;
+    stream->pitchBuffer = (short *)calloc(maxRequired, sizeof(short)*numChannels);
+    if(stream->pitchBuffer == NULL) {
+        sonicDestroyStream(stream);
+        return 0;
+    }
+    // 为downSampleBuffer(降采样)开辟空间
+    stream->downSampleBuffer = (short *)calloc(maxRequired, sizeof(short));
+    if(stream->downSampleBuffer == NULL) {
+        sonicDestroyStream(stream);
+        return 0;
+    }
+    // 初始化各项参数
+    stream->sampleRate = sampleRate;
+    stream->numChannels = numChannels;
+    stream->oldRatePosition = 0;
+    stream->newRatePosition = 0;
+    stream->minPeriod = minPeriod;
+    stream->maxPeriod = maxPeriod;
+    stream->maxRequired = maxRequired;
+    stream->prevPeriod = 0;
+    return 1;
+}
+
/* Create a sonic stream.  Return NULL only if we are out of memory and cannot
   allocate the stream. */
sonicStream sonicCreateStream(
    int sampleRate,
    int numChannels)
{
    /* Zero-initialized, so all buffer pointers start NULL and counts start 0. */
    sonicStream stream = (sonicStream)calloc(1, sizeof(struct sonicStreamStruct));

    if(stream == NULL) {
        return NULL;
    }
    /* On failure this has already destroyed the stream; just report NULL. */
    if(!allocateStreamBuffers(stream, sampleRate, numChannels)) {
        return NULL;
    }
    /* Neutral defaults: no speed/pitch/rate/volume change. */
    stream->speed = 1.0f;
    stream->pitch = 1.0f;
    stream->volume = 1.0f;
    stream->rate = 1.0f;
    stream->oldRatePosition = 0;
    stream->newRatePosition = 0;
    stream->useChordPitch = 0;
    stream->quality = 0;
    stream->avePower = 50.0f;
    return stream;
}
+
/* Get the sample rate of the stream, in Hz. */
int sonicGetSampleRate(
    sonicStream stream)
{
    return stream->sampleRate;
}
+
/* Set the sample rate of the stream.  This will cause samples buffered in the
   stream to be lost. */
/* NOTE(review): the return value of allocateStreamBuffers() is ignored; on
   allocation failure the stream has been destroyed and the caller's handle
   dangles.  The void interface cannot report this — confirm callers can
   tolerate it or add a checked variant. */
void sonicSetSampleRate(
    sonicStream stream,
    int sampleRate)
{
    freeStreamBuffers(stream);
    allocateStreamBuffers(stream, sampleRate, stream->numChannels);
}
+
/* Get the number of interleaved channels in the stream. */
int sonicGetNumChannels(
    sonicStream stream)
{
    return stream->numChannels;
}
+
/* Set the number of channels of the stream.  This will cause samples buffered
   in the stream to be lost. */
/* NOTE(review): like sonicSetSampleRate, an allocation failure here destroys
   the stream but cannot be reported through the void interface. */
void sonicSetNumChannels(
    sonicStream stream,
    int numChannels)
{
    freeStreamBuffers(stream);
    allocateStreamBuffers(stream, stream->sampleRate, numChannels);
}
+
+/* Enlarge the output buffer if needed. */
+// 根据需要扩大输出缓冲区
+static int enlargeOutputBufferIfNeeded(
+    sonicStream stream,
+    int numSamples)
+{
+    if(stream->numOutputSamples + numSamples > stream->outputBufferSize) {
+        stream->outputBufferSize += (stream->outputBufferSize >> 1) + numSamples;
+        stream->outputBuffer = (short *)realloc(stream->outputBuffer,
+            stream->outputBufferSize*sizeof(short)*stream->numChannels);
+        if(stream->outputBuffer == NULL) {
+            return 0;
+        }
+    }
+    return 1;
+}
+
+/* Enlarge the input buffer if needed. */
+// 如果需要的话增大输入缓冲区
+static int enlargeInputBufferIfNeeded(
+    sonicStream stream,
+    int numSamples)
+{
+    // 流中已经有的采样数据的大小 + 新的采样点个数
+    if(stream->numInputSamples + numSamples > stream->inputBufferSize) {
+        stream->inputBufferSize += (stream->inputBufferSize >> 1) + numSamples;
+        // 重新设置内存空间的大小
+        stream->inputBuffer = (short *)realloc(stream->inputBuffer,
+            stream->inputBufferSize*sizeof(short)*stream->numChannels);
+        if(stream->inputBuffer == NULL) {
+            return 0;
+        }
+    }
+    return 1;
+}
+
/* Append float samples (nominal range [-1, 1]) to the input buffer,
   converting to 16-bit by scaling with 32767.
   Returns 1 on success, 0 if the buffer could not be enlarged. */
static int addFloatSamplesToInputBuffer(
    sonicStream stream,
    float *samples,
    int numSamples)
{
    short *buffer;
    int count = numSamples*stream->numChannels;

    if(numSamples == 0) {
        return 1;
    }
    if(!enlargeInputBufferIfNeeded(stream, numSamples)) {
        return 0;
    }
    /* Write position: just past the frames already buffered. */
    buffer = stream->inputBuffer + stream->numInputSamples*stream->numChannels;
    while(count--) {
        *buffer++ = (*samples++)*32767.0f;
    }
    stream->numInputSamples += numSamples;
    return 1;
}
+
/* Append 16-bit samples to the input buffer verbatim.
   Returns 1 on success, 0 if the buffer could not be enlarged. */
static int addShortSamplesToInputBuffer(
    sonicStream stream,
    short *samples,
    int numSamples)
{
    if(numSamples == 0) {
        return 1;
    }
    if(!enlargeInputBufferIfNeeded(stream, numSamples)) {
        return 0;
    }
    /* Copy after the frames already buffered, then bump the count. */
    memcpy(stream->inputBuffer + stream->numInputSamples*stream->numChannels, samples,
        numSamples*sizeof(short)*stream->numChannels);
    stream->numInputSamples += numSamples;
    return 1;
}
+
/* Append unsigned 8-bit samples to the input buffer, converting to 16-bit
   by recentering around 0 (bias 128) and shifting up 8 bits.
   Returns 1 on success, 0 if the buffer could not be enlarged. */
static int addUnsignedCharSamplesToInputBuffer(
    sonicStream stream,
    unsigned char *samples,
    int numSamples)
{
    short *buffer;
    int count = numSamples*stream->numChannels;

    if(numSamples == 0) {
        return 1;
    }
    if(!enlargeInputBufferIfNeeded(stream, numSamples)) {
        return 0;
    }
    buffer = stream->inputBuffer + stream->numInputSamples*stream->numChannels;
    while(count--) {
        *buffer++ = (*samples++ - 128) << 8;
    }
    stream->numInputSamples += numSamples;
    return 1;
}
+
/* Drop the first `position` frames of the input buffer (already processed),
   sliding any remaining frames to the front. */
static void removeInputSamples(
    sonicStream stream,
    int position)
{
    int remainingSamples = stream->numInputSamples - position;

    if(remainingSamples > 0) {
        /* Regions may overlap, hence memmove rather than memcpy. */
        memmove(stream->inputBuffer, stream->inputBuffer + position*stream->numChannels,
            remainingSamples*sizeof(short)*stream->numChannels);
    }
    stream->numInputSamples = remainingSamples;
}
+
/* Append `numSamples` frames from `samples` to the output buffer.
   Returns 1 on success, 0 if the output buffer could not be enlarged. */
static int copyToOutput(
    sonicStream stream,
    short *samples,
    int numSamples)
{
    if(!enlargeOutputBufferIfNeeded(stream, numSamples)) {
        return 0;
    }
    memcpy(stream->outputBuffer + stream->numOutputSamples*stream->numChannels,
        samples, numSamples*sizeof(short)*stream->numChannels);
    stream->numOutputSamples += numSamples;
    return 1;
}
+
/* Pass frames straight from the input buffer (starting at frame `position`)
   to the output buffer, up to maxRequired frames per call, consuming the
   stream's remainingInputToCopy budget.
   Returns the number of frames copied, or 0 if the output buffer could not
   be resized. */
static int copyInputToOutput(
    sonicStream stream,
    int position)
{
    int numSamples = stream->remainingInputToCopy;

    /* Never move more than one processing window at a time. */
    if(numSamples > stream->maxRequired) {
        numSamples = stream->maxRequired;
    }
    if(!copyToOutput(stream, stream->inputBuffer + position*stream->numChannels,
            numSamples)) {
        return 0;
    }
    /* Frames of unmodified input still left to pass through. */
    stream->remainingInputToCopy -= numSamples;
    return numSamples;
}
+
/* Read up to maxSamples frames out of the stream as floats in [-1, 1].
   Returns the number of frames read; 0 means no data is available, which is
   not an error condition.  Unread frames stay buffered for the next call. */
int sonicReadFloatFromStream(
    sonicStream stream,
    float *samples,
    int maxSamples)
{
    int numSamples = stream->numOutputSamples;
    int remainingSamples = 0;
    short *buffer;
    int count;

    if(numSamples == 0) {
        return 0;
    }
    if(numSamples > maxSamples) {
        remainingSamples = numSamples - maxSamples;
        numSamples = maxSamples;
    }
    buffer = stream->outputBuffer;
    count = numSamples*stream->numChannels;
    /* Inverse of the 32767 scaling used on input. */
    while(count--) {
        *samples++ = (*buffer++)/32767.0f;
    }
    /* Slide any frames we did not hand out to the front of the buffer. */
    if(remainingSamples > 0) {
        memmove(stream->outputBuffer, stream->outputBuffer + numSamples*stream->numChannels,
            remainingSamples*sizeof(short)*stream->numChannels);
    }
    stream->numOutputSamples = remainingSamples;
    return numSamples;
}
+
/* Read up to maxSamples frames out of the stream as 16-bit samples.
   Returns the number of frames read; 0 means no data is available, which is
   not an error condition.  Unread frames stay buffered for the next call. */
int sonicReadShortFromStream(
    sonicStream stream,
    short *samples,
    int maxSamples)
{
    int numSamples = stream->numOutputSamples;
    int remainingSamples = 0;

    if(numSamples == 0) {
        return 0;
    }
    if(numSamples > maxSamples) {
        remainingSamples = numSamples - maxSamples;
        numSamples = maxSamples;
    }
    memcpy(samples, stream->outputBuffer, numSamples*sizeof(short)*stream->numChannels);
    /* Slide any frames we did not hand out to the front of the buffer. */
    if(remainingSamples > 0) {
        memmove(stream->outputBuffer, stream->outputBuffer + numSamples*stream->numChannels,
            remainingSamples*sizeof(short)*stream->numChannels);
    }
    stream->numOutputSamples = remainingSamples;
    return numSamples;
}
+
/* Read up to maxSamples frames out of the stream as unsigned 8-bit samples
   (top 8 bits of each 16-bit sample, re-biased by 128).
   Returns the number of frames read; 0 means no data is available, which is
   not an error condition.  Unread frames stay buffered for the next call. */
int sonicReadUnsignedCharFromStream(
    sonicStream stream,
    unsigned char *samples,
    int maxSamples)
{
    int numSamples = stream->numOutputSamples;
    int remainingSamples = 0;
    short *buffer;
    int count;

    if(numSamples == 0) {
        return 0;
    }
    if(numSamples > maxSamples) {
        remainingSamples = numSamples - maxSamples;
        numSamples = maxSamples;
    }
    buffer = stream->outputBuffer;
    count = numSamples*stream->numChannels;
    while(count--) {
        *samples++ = (char)((*buffer++) >> 8) + 128;
    }
    if(remainingSamples > 0) {
        memmove(stream->outputBuffer, stream->outputBuffer + numSamples*stream->numChannels,
            remainingSamples*sizeof(short)*stream->numChannels);
    }
    stream->numOutputSamples = remainingSamples;
    return numSamples;
}
+
/* Force the sonic stream to generate output using whatever data it currently
   has.  No extra delay will be added to the output, but flushing in the middle of
   words could introduce distortion.  Returns 1 on success, 0 on allocation
   failure. */
int sonicFlushStream(
    sonicStream stream)
{
    int maxRequired = stream->maxRequired;
    int remainingSamples = stream->numInputSamples;
    float speed = stream->speed/stream->pitch;
    float rate = stream->rate*stream->pitch;
    /* How many output frames the real (non-padding) data should produce. */
    int expectedOutputSamples = stream->numOutputSamples +
        (int)((remainingSamples/speed + stream->numPitchSamples)/rate + 0.5f);

    /* Add enough silence to flush both input and pitch buffers. */
    if(!enlargeInputBufferIfNeeded(stream, remainingSamples + 2*maxRequired)) {
        return 0;
    }
    memset(stream->inputBuffer + remainingSamples*stream->numChannels, 0,
        2*maxRequired*sizeof(short)*stream->numChannels);
    stream->numInputSamples += 2*maxRequired;
    /* A zero-length write runs the processing pipeline over the padding. */
    if(!sonicWriteShortToStream(stream, NULL, 0)) {
        return 0;
    }
    /* Throw away any extra samples we generated due to the silence we added */
    if(stream->numOutputSamples > expectedOutputSamples) {
        stream->numOutputSamples = expectedOutputSamples;
    }
    /* Empty input and pitch buffers */
    stream->numInputSamples = 0;
    stream->remainingInputToCopy = 0;
    stream->numPitchSamples = 0;
    return 1;
}
+
/* Return the number of frames currently waiting in the output buffer. */
int sonicSamplesAvailable(
   sonicStream stream)
{
    return stream->numOutputSamples;
}
+
/* If skip is greater than one, average skip samples together and write them to
   the down-sample buffer.  If numChannels is greater than one, mix the channels
   together as we down sample.  The result is mono data in
   stream->downSampleBuffer used only for pitch detection. */
static void downSampleInput(
    sonicStream stream,
    short *samples,
    int skip)
{
    int numSamples = stream->maxRequired/skip;
    /* Each output value averages `skip` frames across all channels. */
    int samplesPerValue = stream->numChannels*skip;
    int i, j;
    int value;
    short *downSamples = stream->downSampleBuffer;

    for(i = 0; i < numSamples; i++) {
        value = 0;
        for(j = 0; j < samplesPerValue; j++) {
            value += *samples++;
        }
        value /= samplesPerValue;
        *downSamples++ = value;
    }
}
+
/* Find the best pitch period in [minPeriod, maxPeriod] using the Average
   Magnitude Difference Function: for each candidate period, sum |s[i] -
   s[i+period]| and pick the period with the smallest normalized difference.
   For now, just find the pitch of the first channel.
   Outputs the normalized best (retMinDiff) and worst (retMaxDiff) differences
   and returns the best period. */
static int findPitchPeriodInRange(
    short *samples,
    int minPeriod,
    int maxPeriod,
    int *retMinDiff,
    int *retMaxDiff)
{
    int period, bestPeriod = 0, worstPeriod = 255;
    short *s, *p, sVal, pVal;
    unsigned long diff, minDiff = 1, maxDiff = 0;
    int i;

    for(period = minPeriod; period <= maxPeriod; period++) {
        diff = 0;
        s = samples;
        p = samples + period;
        for(i = 0; i < period; i++) {
            sVal = *s++;
            pVal = *p++;
            diff += sVal >= pVal? (unsigned short)(sVal - pVal) :
                (unsigned short)(pVal - sVal);
        }
        /* Note that the highest number of samples we add into diff will be less
           than 256, since we skip samples.  Thus, diff is a 24 bit number, and
           we can safely multiply by numSamples without overflow */
        /* if (bestPeriod == 0 || (bestPeriod*3/2 > period && diff*bestPeriod < minDiff*period) ||
                diff*bestPeriod < (minDiff >> 1)*period) {*/
        /* Cross-multiplied comparison of diff/period ratios avoids division. */
        if (bestPeriod == 0 || diff*bestPeriod < minDiff*period) {
            minDiff = diff;
            bestPeriod = period;
        }
        if(diff*worstPeriod > maxDiff*period) {
            maxDiff = diff;
            worstPeriod = period;
        }
    }
    *retMinDiff = minDiff/bestPeriod;
    *retMaxDiff = maxDiff/worstPeriod;
    return bestPeriod;
}
+
/* At abrupt ends of voiced words, we can have pitch periods that are better
   approximated by the previous pitch period estimate.  Try to detect this case.
   Returns 1 if the previous period should be used instead of the new one.
   NOTE(review): the `period` parameter is unused in this implementation —
   kept for interface stability; confirm against upstream before removing. */
static int prevPeriodBetter(
    sonicStream stream,
    int period,
    int minDiff,
    int maxDiff,
    int preferNewPeriod)
{
    if(minDiff == 0 || stream->prevPeriod == 0) {
        return 0;
    }
    if(preferNewPeriod) {
        if(maxDiff > minDiff*3) {
            /* Got a reasonable match this period */
            return 0;
        }
        if(minDiff*2 <= stream->prevMinDiff*3) {
            /* Mismatch is not that much greater this period */
            return 0;
        }
    } else {
        if(minDiff <= stream->prevMinDiff) {
            return 0;
        }
    }
    return 1;
}
+
/* Find the pitch period.  This is a critical step, and we may have to try
   multiple ways to get a good answer.  This version uses Average Magnitude
   Difference Function (AMDF).  To improve speed, we down sample by an integer
   factor get in the 11KHz range, and then do it again with a narrower
   frequency range without down sampling */
static int findPitchPeriod(
    sonicStream stream,
    short *samples,
    int preferNewPeriod)
{
    int minPeriod = stream->minPeriod;
    int maxPeriod = stream->maxPeriod;
    int sampleRate = stream->sampleRate;
    int minDiff, maxDiff, retPeriod;
    int skip = 1;
    int period;

    /* In fast mode (quality == 0), down-sample toward SONIC_AMDF_FREQ first. */
    if(sampleRate > SONIC_AMDF_FREQ && stream->quality == 0) {
        skip = sampleRate/SONIC_AMDF_FREQ;
    }
    if(stream->numChannels == 1 && skip == 1) {
        /* Mono, no down-sampling: search the raw samples directly. */
        period = findPitchPeriodInRange(samples, minPeriod, maxPeriod, &minDiff, &maxDiff);
    } else {
        /* Coarse pass on down-sampled (and channel-mixed) data... */
        downSampleInput(stream, samples, skip);
        period = findPitchPeriodInRange(stream->downSampleBuffer, minPeriod/skip,
            maxPeriod/skip, &minDiff, &maxDiff);
        if(skip != 1) {
            /* ...then refine at full rate in a window of +/- 4*skip around it. */
            period *= skip;
            minPeriod = period - (skip << 2);
            maxPeriod = period + (skip << 2);
            if(minPeriod < stream->minPeriod) {
                minPeriod = stream->minPeriod;
            }
            if(maxPeriod > stream->maxPeriod) {
                maxPeriod = stream->maxPeriod;
            }
            if(stream->numChannels == 1) {
                period = findPitchPeriodInRange(samples, minPeriod, maxPeriod,
                    &minDiff, &maxDiff);
            } else {
                downSampleInput(stream, samples, 1);
                period = findPitchPeriodInRange(stream->downSampleBuffer, minPeriod,
                    maxPeriod, &minDiff, &maxDiff);
            }
        }
    }
    /* Possibly fall back to the previous estimate at abrupt word endings. */
    if(prevPeriodBetter(stream, period, minDiff, maxDiff, preferNewPeriod)) {
        retPeriod = stream->prevPeriod;
    } else {
        retPeriod = period;
    }
    stream->prevMinDiff = minDiff;
    stream->prevPeriod = period;
    return retPeriod;
}
+
/* Cross-fade two equal-length segments: rampDown fades from full volume to
   zero while rampUp fades from zero to full, and their sum is written to out.
   All three point at interleaved data with numChannels channels per frame. */
static void overlapAdd(
    int numSamples,
    int numChannels,
    short *out,
    short *rampDown,
    short *rampUp)
{
    int ch, t;

    for(ch = 0; ch < numChannels; ch++) {
        for(t = 0; t < numSamples; t++) {
            int idx = t*numChannels + ch;
#ifdef SONIC_USE_SIN
            /* Optional smoother, sinusoidal cross-fade. */
            float ratio = sin(t*M_PI/(2*numSamples));
            out[idx] = rampDown[idx]*(1.0f - ratio) + rampUp[idx]*ratio;
#else
            /* Linear cross-fade in integer arithmetic. */
            out[idx] = (rampDown[idx]*(numSamples - t) + rampUp[idx]*t)/numSamples;
#endif
        }
    }
}
+
/* Overlap two sound segments, ramp the volume of one down, while ramping the
   other one from zero up, and add them, storing the result at the output.
   Unlike overlapAdd, the up-ramp starts `separation` frames after the
   down-ramp begins, producing numSamples + separation output frames (used
   when lowering pitch, where the new period is longer than the old). */
static void overlapAddWithSeparation(
    int numSamples,
    int numChannels,
    int separation,
    short *out,
    short *rampDown,
    short *rampUp)
{
    short *o, *u, *d;
    int i, t;

    for(i = 0; i < numChannels; i++) {
        o = out + i;
        u = rampUp + i;
        d = rampDown + i;
        for(t = 0; t < numSamples + separation; t++) {
            if(t < separation) {
                /* Only the fading-out segment is audible yet. */
                *o = *d*(numSamples - t)/numSamples;
                d += numChannels;
            } else if(t < numSamples) {
                /* Both segments overlap: linear cross-fade. */
                *o = (*d*(numSamples - t) + *u*(t - separation))/numSamples;
                d += numChannels;
                u += numChannels;
            } else {
                /* Only the fading-in segment remains. */
                *o = *u*(t - separation)/numSamples;
                u += numChannels;
            }
            o += numChannels;
        }
    }
}
+
+/* Just move the new samples in the output buffer to the pitch buffer */
+static int moveNewSamplesToPitchBuffer(
+    sonicStream stream,
+    int originalNumOutputSamples)
+{
+    int numSamples = stream->numOutputSamples - originalNumOutputSamples;
+    int numChannels = stream->numChannels;
+
+    if(stream->numPitchSamples + numSamples > stream->pitchBufferSize) {
+        stream->pitchBufferSize += (stream->pitchBufferSize >> 1) + numSamples;
+        stream->pitchBuffer = (short *)realloc(stream->pitchBuffer,
+            stream->pitchBufferSize*sizeof(short)*numChannels);
+        if(stream->pitchBuffer == NULL) {
+            return 0;
+        }
+    }
+    memcpy(stream->pitchBuffer + stream->numPitchSamples*numChannels,
+        stream->outputBuffer + originalNumOutputSamples*numChannels,
+        numSamples*sizeof(short)*numChannels);
+    stream->numOutputSamples = originalNumOutputSamples;
+    stream->numPitchSamples += numSamples;
+    return 1;
+}
+
/* Drop the first numSamples frames of the pitch buffer (already consumed by
   adjustPitch), sliding any remaining frames to the front. */
static void removePitchSamples(
    sonicStream stream,
    int numSamples)
{
    int numChannels = stream->numChannels;
    short *source = stream->pitchBuffer + numSamples*numChannels;

    if(numSamples == 0) {
        return;
    }
    if(numSamples != stream->numPitchSamples) {
        /* Overlapping move of the unconsumed tail to the buffer start. */
        memmove(stream->pitchBuffer, source, (stream->numPitchSamples -
            numSamples)*sizeof(short)*numChannels);
    }
    stream->numPitchSamples -= numSamples;
}
+
/* Change the pitch.  The latency this introduces could be reduced by looking at
   past samples to determine pitch, rather than future.
   Newly produced output frames are staged in the pitch buffer, re-synthesized
   one pitch period at a time with overlap-add, and written back to the output
   buffer.  Returns 1 on success, 0 on allocation failure. */
static int adjustPitch(
    sonicStream stream,
    int originalNumOutputSamples)
{
    float pitch = stream->pitch;
    int numChannels = stream->numChannels;
    int period, newPeriod, separation;
    int position = 0;
    short *out, *rampDown, *rampUp;

    /* Nothing new was produced, so there is nothing to re-pitch. */
    if(stream->numOutputSamples == originalNumOutputSamples) {
        return 1;
    }
    if(!moveNewSamplesToPitchBuffer(stream, originalNumOutputSamples)) {
        return 0;
    }
    /* Process one full analysis window (maxRequired frames) per iteration. */
    while(stream->numPitchSamples - position >= stream->maxRequired) {
        period = findPitchPeriod(stream, stream->pitchBuffer + position*numChannels, 0);
        /* Raising pitch shortens the period; lowering pitch lengthens it. */
        newPeriod = period/pitch;
        if(!enlargeOutputBufferIfNeeded(stream, newPeriod)) {
            return 0;
        }
        out = stream->outputBuffer + stream->numOutputSamples*numChannels;
        if(pitch >= 1.0f) {
            /* Shorter output period: cross-fade the window onto itself. */
            rampDown = stream->pitchBuffer + position*numChannels;
            rampUp = stream->pitchBuffer + (position + period - newPeriod)*numChannels;
            overlapAdd(newPeriod, numChannels, out, rampDown, rampUp);
        } else {
            /* Longer output period: insert `separation` frames of spread. */
            rampDown = stream->pitchBuffer + position*numChannels;
            rampUp = stream->pitchBuffer + position*numChannels;
            separation = newPeriod - period;
            overlapAddWithSeparation(period, numChannels, separation, out, rampDown, rampUp);
        }
        stream->numOutputSamples += newPeriod;
        position += period;
    }
    removePitchSamples(stream, position);
    return 1;
}
+
+/* Aproximate the sinc function times a Hann window from the sinc table. */
+static int findSincCoefficient(int i, int ratio, int width) {
+    int lobePoints = (SINC_TABLE_SIZE-1)/SINC_FILTER_POINTS;
+    int left = i*lobePoints + (ratio*lobePoints)/width;
+    int right = left + 1;
+    int position = i*lobePoints*width + ratio*lobePoints - left*width;
+    int leftVal = sincTable[left];
+    int rightVal = sincTable[right];
+
+    return ((leftVal*(width - position) + rightVal*position) << 1)/width;
+}
+
/* Return 1 if value >= 0, else -1.  This represents the sign of value. */
static int getSign(int value) {
    /* Fix: the previous code returned 0 for negative values, contradicting
       this contract and breaking interpolate(): a negative accumulator
       overflow added 0 to overflowCount, so the SHRT_MIN clip was
       unreachable. */
    return value >= 0 ? 1 : -1;
}
+
/* Interpolate the new output sample. */
static short interpolate(
    sonicStream stream,
    short *in,
    int oldSampleRate,
    int newSampleRate)
{
    /* Compute N-point sinc FIR-filter here.  Clip rather than overflow. */
    int i;
    int total = 0;
    int position = stream->newRatePosition*oldSampleRate;
    int leftPosition = stream->oldRatePosition*newSampleRate;
    int rightPosition = (stream->oldRatePosition + 1)*newSampleRate;
    /* Fractional position of the output sample between two input samples,
       expressed as ratio/width. */
    int ratio = rightPosition - position - 1;
    int width = rightPosition - leftPosition;
    int weight, value;
    int oldSign;
    int overflowCount = 0;

    for (i = 0; i < SINC_FILTER_POINTS; i++) {
        weight = findSincCoefficient(i, ratio, width);
        /* printf("%u %f\n", i, weight); */
        value = in[i*stream->numChannels]*weight;
        oldSign = getSign(total);
        total += value;
        /* A sign flip while adding a value of the same sign means the 32-bit
           accumulator wrapped.  NOTE(review): getSign() as written returns 0
           (not -1) for negative values, so a negative overflow never changes
           overflowCount and the SHRT_MIN branch below looks unreachable —
           confirm against the upstream sonic sources. */
        if (oldSign != getSign(total) && getSign(value) == oldSign) {
            /* We must have overflowed.  This can happen with a sinc filter. */
            overflowCount += oldSign;
        }
    }
    /* It is better to clip than to wrap if there was a overflow. */
    if (overflowCount > 0) {
        return SHRT_MAX;
    } else if (overflowCount < 0) {
        return SHRT_MIN;
    }
    /* Weights are 16-bit fixed point, so scale the accumulator back down. */
    return total >> 16;
}
+
/* Change the rate.  Interpolate with a sinc FIR filter using a Hann window. */
static int adjustRate(
    sonicStream stream,
    float rate,
    int originalNumOutputSamples)
{
    int newSampleRate = stream->sampleRate/rate;
    int oldSampleRate = stream->sampleRate;
    int numChannels = stream->numChannels;
    int position = 0;
    short *in, *out;
    int i;
    int N = SINC_FILTER_POINTS;

    /* Set these values to help with the integer math */
    /* (scale both rates down together so products below fit in 32 bits). */
    while(newSampleRate > (1 << 14) || oldSampleRate > (1 << 14)) {
        newSampleRate >>= 1;
        oldSampleRate >>= 1;
    }
    if(stream->numOutputSamples == originalNumOutputSamples) {
        return 1;
    }
    if(!moveNewSamplesToPitchBuffer(stream, originalNumOutputSamples)) {
        return 0;
    }
    /* Leave at least N pitch sample in the buffer */
    /* (the sinc filter reads N samples ahead of `position`). */
    for(position = 0; position < stream->numPitchSamples - N; position++) {
        /* Emit output samples until the output clock catches up with the
           input clock at the new rate. */
        while((stream->oldRatePosition + 1)*newSampleRate >
                stream->newRatePosition*oldSampleRate) {
            if(!enlargeOutputBufferIfNeeded(stream, 1)) {
                return 0;
            }
            out = stream->outputBuffer + stream->numOutputSamples*numChannels;
            in = stream->pitchBuffer + position*numChannels;
            for(i = 0; i < numChannels; i++) {
                *out++ = interpolate(stream, in, oldSampleRate, newSampleRate);
                in++;
            }
            stream->newRatePosition++;
            stream->numOutputSamples++;
        }
        stream->oldRatePosition++;
        if(stream->oldRatePosition == oldSampleRate) {
            /* Both rate counters must wrap at the same instant. */
            stream->oldRatePosition = 0;
            if(stream->newRatePosition != newSampleRate) {
                fprintf(stderr,
                    "Assertion failed: stream->newRatePosition != newSampleRate\n");
                exit(1);
            }
            stream->newRatePosition = 0;
        }
    }
    removePitchSamples(stream, position);
    return 1;
}
+
+/* Skip over a pitch period, and copy period/speed samples to the output */
+static int skipPitchPeriod(
+    sonicStream stream,
+    short *samples,
+    float speed,
+    int period)
+{
+    long newSamples;
+    int numChannels = stream->numChannels;
+
+    if(speed >= 2.0f) {
+        newSamples = period/(speed - 1.0f);
+    } else {
+        newSamples = period;
+        stream->remainingInputToCopy = period*(2.0f - speed)/(speed - 1.0f);
+    }
+    if(!enlargeOutputBufferIfNeeded(stream, newSamples)) {
+        return 0;
+    }
+    overlapAdd(newSamples, numChannels, stream->outputBuffer +
+        stream->numOutputSamples*numChannels, samples, samples + period*numChannels);
+    stream->numOutputSamples += newSamples;
+    return newSamples;
+}
+
+/* Insert a pitch period, and determine how much input to copy directly. */
+static int insertPitchPeriod(
+    sonicStream stream,
+    short *samples,
+    float speed,
+    int period)
+{
+    long newSamples;
+    short *out;
+    int numChannels = stream->numChannels;
+
+    if(speed < 0.5f) {
+        newSamples = period*speed/(1.0f - speed);
+    } else {
+        newSamples = period;
+        stream->remainingInputToCopy = period*(2.0f*speed - 1.0f)/(1.0f - speed);
+    }
+    if(!enlargeOutputBufferIfNeeded(stream, period + newSamples)) {
+        return 0;
+    }
+    out = stream->outputBuffer + stream->numOutputSamples*numChannels;
+    memcpy(out, samples, period*sizeof(short)*numChannels);
+    out = stream->outputBuffer + (stream->numOutputSamples + period)*numChannels;
+    overlapAdd(newSamples, numChannels, out, samples + period*numChannels, samples);
+    stream->numOutputSamples += period + newSamples;
+    return newSamples;
+}
+
/* Resample as many pitch periods as we have buffered on the input.  Return 0 if
   we fail to resize an input or output buffer. */
static int changeSpeed(
    sonicStream stream,
    float speed)
{
    short *samples;
    int numSamples = stream->numInputSamples;
    int position = 0, period, newSamples;
    int maxRequired = stream->maxRequired;

    /* printf("Changing speed to %f\n", speed); */
    /* Not enough buffered input to detect a full pitch period yet. */
    if(stream->numInputSamples < maxRequired) {
        return 1;
    }
    do {
        if(stream->remainingInputToCopy > 0) {
            /* PICOLA phase: copy raw input straight through to the output. */
            newSamples = copyInputToOutput(stream, position);
            position += newSamples;
        } else {
            samples = stream->inputBuffer + position*stream->numChannels;
            period = findPitchPeriod(stream, samples, 1);
            if(speed > 1.0) {
                /* Speeding up: skip over one pitch period. */
                newSamples = skipPitchPeriod(stream, samples, speed, period);
                position += period + newSamples;
            } else {
                /* Slowing down: insert an extra pitch period. */
                newSamples = insertPitchPeriod(stream, samples, speed, period);
                position += newSamples;
            }
        }
        if(newSamples == 0) {
            return 0; /* Failed to resize output buffer */
        }
    } while(position + maxRequired <= numSamples);
    removeInputSamples(stream, position);
    return 1;
}
+
+/* Resample as many pitch periods as we have buffered on the input.  Return 0 if
+   we fail to resize an input or output buffer.  Also scale the output by the volume. */
+// 尽可能多的将输入缓冲区中的基音周期进行重采样,如果失败返回0,如果成功返回1。同时也scale了音量
+static int processStreamInput(
+    sonicStream stream)
+{
+    // 流中输出缓冲区中原有的采样点的数量
+    int originalNumOutputSamples = stream->numOutputSamples;
+    // 速度
+    float speed = stream->speed/stream->pitch;
+    float rate = stream->rate;
+    // 如果不用chordPitch
+    if(!stream->useChordPitch) {
+        rate *= stream->pitch;
+    }
+    // 改变速度
+    if(speed > 1.00001 || speed < 0.99999) {
+        changeSpeed(stream, speed);
+    } else {
+        if(!copyToOutput(stream, stream->inputBuffer, stream->numInputSamples)) {
+            return 0;
+        }
+        stream->numInputSamples = 0;
+    }
+    if(stream->useChordPitch) {
+        if(stream->pitch != 1.0f) {
+            if(!adjustPitch(stream, originalNumOutputSamples)) {
+                return 0;
+            }
+        }
+    } else if(rate != 1.0f) {
+        if(!adjustRate(stream, rate, originalNumOutputSamples)) {
+            return 0;
+        }
+    }
+    if(stream->volume != 1.0f) {
+        /* Adjust output volume. */
+        scaleSamples(stream->outputBuffer + originalNumOutputSamples*stream->numChannels,
+            (stream->numOutputSamples - originalNumOutputSamples)*stream->numChannels,
+            stream->volume);
+    }
+    return 1;
+}
+
+/* Write floating point data to the input buffer and process it. */
+int sonicWriteFloatToStream(
+    sonicStream stream,
+    float *samples,
+    int numSamples)
+{
+    if(!addFloatSamplesToInputBuffer(stream, samples, numSamples)) {
+        return 0;
+    }
+    return processStreamInput(stream);
+}
+
+/* Simple wrapper around sonicWriteFloatToStream that does the short to float
+   conversion for you. */
+   // 向流中写入short类型的数据并进行处理
+int sonicWriteShortToStream(
+    sonicStream stream,
+    short *samples,
+    int numSamples)
+{
+    if(!addShortSamplesToInputBuffer(stream, samples, numSamples)) {
+        return 0;
+    }
+    // 处理输入流的数据
+    return processStreamInput(stream);
+}
+
+/* Simple wrapper around sonicWriteFloatToStream that does the unsigned char to float
+   conversion for you. */
+int sonicWriteUnsignedCharToStream(
+    sonicStream stream,
+    unsigned char *samples,
+    int numSamples)
+{
+    if(!addUnsignedCharSamplesToInputBuffer(stream, samples, numSamples)) {
+        return 0;
+    }
+    return processStreamInput(stream);
+}
+
+/* This is a non-stream oriented interface to just change the speed of a sound sample */
+int sonicChangeFloatSpeed(
+    float *samples,
+    int numSamples,
+    float speed,
+    float pitch,
+    float rate,
+    float volume,
+    int useChordPitch,
+    int sampleRate,
+    int numChannels)
+{
+    sonicStream stream = sonicCreateStream(sampleRate, numChannels);
+
+    sonicSetSpeed(stream, speed);
+    sonicSetPitch(stream, pitch);
+    sonicSetRate(stream, rate);
+    sonicSetVolume(stream, volume);
+    sonicSetChordPitch(stream, useChordPitch);
+    sonicWriteFloatToStream(stream, samples, numSamples);
+    sonicFlushStream(stream);
+    numSamples = sonicSamplesAvailable(stream);
+    sonicReadFloatFromStream(stream, samples, numSamples);
+    sonicDestroyStream(stream);
+    return numSamples;
+}
+
+/* This is a non-stream oriented interface to just change the speed of a sound sample */
+int sonicChangeShortSpeed(
+    short *samples,
+    int numSamples,
+    float speed,
+    float pitch,
+    float rate,
+    float volume,
+    int useChordPitch,
+    int sampleRate,
+    int numChannels)
+{   // 创建并初始化流
+    sonicStream stream = sonicCreateStream(sampleRate, numChannels);
+    // 设置流的速度
+    sonicSetSpeed(stream, speed);
+    // 设置流的音调
+    sonicSetPitch(stream, pitch);
+    // 设置流的速率
+    sonicSetRate(stream, rate);
+    // 设置流的音量
+    sonicSetVolume(stream, volume);
+    // 设置
+    sonicSetChordPitch(stream, useChordPitch);
+    // 向流中写入short类型的数据
+    sonicWriteShortToStream(stream, samples, numSamples);
+    sonicFlushStream(stream);
+    numSamples = sonicSamplesAvailable(stream);
+    sonicReadShortFromStream(stream, samples, numSamples);
+    sonicDestroyStream(stream);
+    return numSamples;
+}

+ 154 - 0
libs/AVPlayer/Sonic/sonic.h

@@ -0,0 +1,154 @@
/* Sonic library
   Copyright 2010
   Bill Cox
   This file is part of the Sonic Library.
   This file is licensed under the Apache 2.0 license, and also placed into the public domain.
   Use it either way, at your option.
*/

/*
The Sonic Library implements a new algorithm invented by Bill Cox for the
specific purpose of speeding up speech by high factors at high quality.  It
generates smooth speech at speed up factors as high as 6X, possibly more.  It is
also capable of slowing down speech, and generates high quality results
regardless of the speed up or slow down factor.  For speeding up speech by 2X or
more, the following equation is used:
    newSamples = period/(speed - 1.0)
    scale = 1.0/newSamples;
where period is the current pitch period, determined using AMDF or any other
pitch estimator, and speed is the speedup factor.  If the current position in
the input stream is pointed to by "samples", and the current output stream
position is pointed to by "out", then newSamples number of samples can be
generated with:
    out[t] = (samples[t]*(newSamples - t) + samples[t + period]*t)/newSamples;
where t = 0 to newSamples - 1.
For speed factors < 2X, the PICOLA algorithm is used.  The above
algorithm is first used to double the speed of one pitch period.  Then, enough
input is directly copied from the input to the output to achieve the desired
speed up factor, where 1.0 < speed < 2.0.  The amount of data copied is derived:
    speed = (2*period + length)/(period + length)
    speed*length + speed*period = 2*period + length
    length(speed - 1) = 2*period - speed*period
    length = period*(2 - speed)/(speed - 1)
For slowing down speech where 0.5 < speed < 1.0, a pitch period is inserted into
the output twice, and length of input is copied from the input to the output
until the output desired speed is reached.  The length of data copied is:
    length = period*(speed - 0.5)/(1 - speed)
For slow down factors below 0.5, no data is copied, and an algorithm
similar to high speed factors is used.
*/

/* Fix: the header previously had no include guard; multiple inclusion would
   redefine the SONIC_* macros and the sonicStream typedef. */
#ifndef SONIC_H
#define SONIC_H

#ifdef  __cplusplus
extern "C" {
#endif

/* Uncomment this to use sin-wav based overlap add which in theory can improve
   sound quality slightly, at the expense of lots of floating point math. */
/* #define SONIC_USE_SIN */

/* This specifies the range of voice pitches we try to match.
   Note that if we go lower than 65, we could overflow in findPitchInRange */
#define SONIC_MIN_PITCH 65
#define SONIC_MAX_PITCH 400

/* These are used to down-sample some inputs to improve speed */
#define SONIC_AMDF_FREQ 4000

struct sonicStreamStruct;
typedef struct sonicStreamStruct *sonicStream;

/* For all of the following functions, numChannels is multiplied by numSamples
   to determine the actual number of values read or returned. */

/* Create a sonic stream.  Return NULL only if we are out of memory and cannot
   allocate the stream.  Set numChannels to 1 for mono, and 2 for stereo. */
sonicStream sonicCreateStream(int sampleRate, int numChannels);
/* Destroy the sonic stream. */
void sonicDestroyStream(sonicStream stream);
/* Use this to write floating point data to be speed up or down into the stream.
   Values must be between -1 and 1.  Return 0 if memory realloc failed, otherwise 1 */
int sonicWriteFloatToStream(sonicStream stream, float *samples, int numSamples);
/* Use this to write 16-bit data to be speed up or down into the stream.
   Return 0 if memory realloc failed, otherwise 1 */
int sonicWriteShortToStream(sonicStream stream, short *samples, int numSamples);
/* Use this to write 8-bit unsigned data to be speed up or down into the stream.
   Return 0 if memory realloc failed, otherwise 1 */
int sonicWriteUnsignedCharToStream(sonicStream stream, unsigned char *samples, int numSamples);
/* Use this to read floating point data out of the stream.  Sometimes no data
   will be available, and zero is returned, which is not an error condition. */
int sonicReadFloatFromStream(sonicStream stream, float *samples, int maxSamples);
/* Use this to read 16-bit data out of the stream.  Sometimes no data will
   be available, and zero is returned, which is not an error condition. */
int sonicReadShortFromStream(sonicStream stream, short *samples, int maxSamples);
/* Use this to read 8-bit unsigned data out of the stream.  Sometimes no data will
   be available, and zero is returned, which is not an error condition. */
int sonicReadUnsignedCharFromStream(sonicStream stream, unsigned char *samples, int maxSamples);
/* Force the sonic stream to generate output using whatever data it currently
   has.  No extra delay will be added to the output, but flushing in the middle of
   words could introduce distortion. */
int sonicFlushStream(sonicStream stream);
/* Return the number of samples in the output buffer */
int sonicSamplesAvailable(sonicStream stream);
/* Get the speed of the stream. */
float sonicGetSpeed(sonicStream stream);
/* Set the speed of the stream. */
void sonicSetSpeed(sonicStream stream, float speed);
/* Get the pitch of the stream. */
float sonicGetPitch(sonicStream stream);
/* Set the pitch of the stream. */
void sonicSetPitch(sonicStream stream, float pitch);
/* Get the rate of the stream. */
float sonicGetRate(sonicStream stream);
/* Set the rate of the stream. */
void sonicSetRate(sonicStream stream, float rate);
/* Get the scaling factor of the stream. */
float sonicGetVolume(sonicStream stream);
/* Set the scaling factor of the stream. */
void sonicSetVolume(sonicStream stream, float volume);
/* Get the chord pitch setting. */
int sonicGetChordPitch(sonicStream stream);
/* Set chord pitch mode on or off.  Default is off.  See the documentation
   page for a description of this feature. */
void sonicSetChordPitch(sonicStream stream, int useChordPitch);
/* Get the quality setting. */
int sonicGetQuality(sonicStream stream);
/* Set the "quality".  Default 0 is virtually as good as 1, but very much faster. */
void sonicSetQuality(sonicStream stream, int quality);
/* Get the sample rate of the stream. */
int sonicGetSampleRate(sonicStream stream);
/* Set the sample rate of the stream.  This will drop any samples that have not been read. */
void sonicSetSampleRate(sonicStream stream, int sampleRate);
/* Get the number of channels. */
int sonicGetNumChannels(sonicStream stream);
/* Set the number of channels.  This will drop any samples that have not been read. */
void sonicSetNumChannels(sonicStream stream, int numChannels);
/* This is a non-stream oriented interface to just change the speed of a sound
   sample.  It works in-place on the sample array, so there must be at least
   speed*numSamples available space in the array. Returns the new number of samples. */
int sonicChangeFloatSpeed(float *samples, int numSamples, float speed, float pitch,
    float rate, float volume, int useChordPitch, int sampleRate, int numChannels);
/* This is a non-stream oriented interface to just change the speed of a sound
   sample.  It works in-place on the sample array, so there must be at least
   speed*numSamples available space in the array. Returns the new number of samples. */
int sonicChangeShortSpeed(short *samples, int numSamples, float speed, float pitch,
    float rate, float volume, int useChordPitch, int sampleRate, int numChannels);

#ifdef  __cplusplus
}
#endif

#endif /* SONIC_H */
+ 35 - 0
libs/AVPlayer/av_clock.h

@@ -0,0 +1,35 @@
+#ifndef AVCLOCK_H
+#define AVCLOCK_H
+
+#include "ffmpeg_compat.h"
+
+class AVClock
+{
+public:
+    AVClock()
+        : m_pts(0.0)
+        , m_drift(0.0)
+    {}
+
+    inline void reset()
+    {
+        m_pts = 0.0;
+        m_drift = 0.0;
+    }
+
+    inline void setClock(double pts) { setCloctAt(pts); }
+
+    inline double getClock() { return m_drift + av_gettime_relative() / 1000000.0; }
+
+private:
+    inline void setCloctAt(double pts)
+    {
+        m_drift = pts - av_gettime_relative() / 1000000.0;
+        m_pts = pts;
+    }
+
+    double m_pts;
+    double m_drift;
+};
+
+#endif // AVCLOCK_H

+ 727 - 0
libs/AVPlayer/av_decoder.cpp

@@ -0,0 +1,727 @@
+#include "av_decoder.h"
+#include <QDebug>
+#include <QThread>
+#include "threadpool.h"
+
+using AVTool::Decoder;
+
+Decoder::Decoder()
+    : m_fmtCtx(nullptr)
+    , m_maxFrameQueueSize(16)
+    , m_maxPacketQueueSize(30)
+    , m_audioIndex(-1)
+    , m_videoIndex(-1)
+    , m_exit(0)
+    , m_duration(0)
+{
+    if (!init())
+        qDebug() << "Decoder init failed!\n";
+
+    setInitVal();
+}
+
+Decoder::~Decoder()
+{
+    exit();
+}
+
+AVTool::MediaInfo* Decoder::detectMediaInfo(const QString& url)
+{
+    int ret = 0;
+    int duration = 0;
+    //解封装初始化
+    AVFormatContext* fmtCtx = avformat_alloc_context();
+
+    //用于获取流时长
+    AVDictionary* formatOpts = nullptr;
+    av_dict_set(&formatOpts, "probesize", "32", 0);
+
+    ret = avformat_open_input(&fmtCtx, url.toUtf8().constData(), nullptr, nullptr);
+    if (ret < 0) {
+        av_strerror(ret, m_errBuf, sizeof(m_errBuf));
+        qDebug() << "avformat_open_input error:" << m_errBuf;
+        av_dict_free(&formatOpts);
+        //打开失败释放分配的AVFormatContext内存
+        avformat_free_context(fmtCtx);
+        return Q_NULLPTR;
+    }
+
+    ret = avformat_find_stream_info(fmtCtx, nullptr);
+    if (ret < 0) {
+        av_strerror(ret, m_errBuf, sizeof(m_errBuf));
+        qDebug() << "avformat_find_stream_info error:" << m_errBuf;
+        av_dict_free(&formatOpts);
+        return Q_NULLPTR;
+    }
+
+    //记录流时长
+    AVRational q = {1, AV_TIME_BASE};
+    duration = (uint32_t) (fmtCtx->duration * av_q2d(q));
+    av_dict_free(&formatOpts);
+
+    int videoIndex = av_find_best_stream(fmtCtx, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
+    if (videoIndex < 0) {
+        qDebug() << "no video stream!";
+        return Q_NULLPTR;
+    }
+    //视频解码初始化
+    AVCodecParameters* videoCodecPar = fmtCtx->streams[videoIndex]->codecpar;
+    if (!videoCodecPar) {
+        qDebug() << "videocodecpar is nullptr!";
+        return Q_NULLPTR;
+    }
+
+    AVCodecContext* codecCtx = avcodec_alloc_context3(nullptr);
+    ret = avcodec_parameters_to_context(codecCtx, videoCodecPar);
+    if (ret < 0) {
+        av_strerror(ret, m_errBuf, sizeof(m_errBuf));
+        qDebug() << "error info_avcodec_parameters_to_context:" << m_errBuf;
+        return Q_NULLPTR;
+    }
+
+    const AVCodec* videoCodec = avcodec_find_decoder(codecCtx->codec_id);
+    if (!videoCodec) {
+        qDebug() << "avcodec_find_decoder failed!";
+        return Q_NULLPTR;
+    }
+    codecCtx->codec_id = videoCodec->id;
+
+    ret = avcodec_open2(codecCtx, videoCodec, nullptr);
+    if (ret < 0) {
+        av_strerror(ret, m_errBuf, sizeof(m_errBuf));
+        qDebug() << "error info_avcodec_open2:" << m_errBuf;
+        return Q_NULLPTR;
+    }
+
+    AVPacket* pkt = av_packet_alloc();
+    AVFrame* frame = av_frame_alloc();
+    bool flag = false;
+    while (1) {
+        ret = av_read_frame(fmtCtx, pkt);
+        if (ret != 0) {
+            return Q_NULLPTR;
+        }
+        if (pkt->stream_index == videoIndex) {
+            ret = avcodec_send_packet(codecCtx, pkt);
+            av_packet_unref(pkt);
+            if (ret < 0 || ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
+                av_strerror(ret, m_errBuf, sizeof(m_errBuf));
+                qDebug() << "avcodec_send_packet error:" << m_errBuf;
+                continue;
+            }
+            while (1) {
+                ret = avcodec_receive_frame(codecCtx, frame);
+                if (ret == 0) {
+                    flag = true;
+                    break;
+                } else if (ret == AVERROR(EAGAIN)) {
+                    break;
+                } else {
+                    return Q_NULLPTR;
+                }
+            }
+            if (flag)
+                break;
+        } else {
+            av_packet_unref(pkt);
+        }
+    }
+
+    int imageWidth = videoCodecPar->width;
+    int imageHeight = videoCodecPar->height;
+
+    enum AVPixelFormat dstPixFmt = AV_PIX_FMT_RGB24;
+    int swsFlags = SWS_BICUBIC;
+
+    uint8_t* pixels[4];
+    int pitch[4];
+    //分配存储转换后帧数据的buffer内存
+    int bufSize = av_image_get_buffer_size(dstPixFmt, imageWidth, imageHeight, 1);
+    uint8_t* buffer = (uint8_t*) av_malloc(bufSize * sizeof(uint8_t));
+    av_image_fill_arrays(pixels, pitch, buffer, dstPixFmt, imageWidth, imageHeight, 1);
+
+    SwsContext* swsCtx = sws_getCachedContext(nullptr,
+                                              frame->width,
+                                              frame->height,
+                                              (enum AVPixelFormat) frame->format,
+                                              imageWidth,
+                                              imageHeight,
+                                              dstPixFmt,
+                                              swsFlags,
+                                              nullptr,
+                                              nullptr,
+                                              nullptr);
+    if (swsCtx)
+        sws_scale(swsCtx, frame->data, frame->linesize, 0, frame->height, pixels, pitch);
+    av_frame_unref(frame);
+
+    AVTool::MediaInfo* info = new AVTool::MediaInfo;
+    info->duration = duration;
+    info->tipImg = QImage(pixels[0], imageWidth, imageHeight, pitch[0], QImage::Format_RGB888);
+
+    av_packet_free(&pkt);
+    av_frame_free(&frame);
+
+    avformat_close_input(&fmtCtx);
+    avcodec_free_context(&codecCtx);
+
+    return info;
+}
+
+bool Decoder::init()
+{
+    if (!ThreadPool::init()) {
+        qDebug() << "threadpool init failed!\n";
+        return false;
+    }
+
+    m_audioPacketQueue.pktVec.resize(m_maxPacketQueueSize);
+    m_videoPacketQueue.pktVec.resize(m_maxPacketQueueSize);
+
+    m_audioFrameQueue.frameVec.resize(m_maxFrameQueueSize);
+    m_videoFrameQueue.frameVec.resize(m_maxFrameQueueSize);
+
+    m_audioPktDecoder.codecCtx = nullptr;
+    m_videoPktDecoder.codecCtx = nullptr;
+
+    return true;
+}
+
+void Decoder::setInitVal()
+{
+    m_audioPacketQueue.size = 0;
+    m_audioPacketQueue.pushIndex = 0;
+    m_audioPacketQueue.readIndex = 0;
+    m_audioPacketQueue.serial = 0;
+
+    m_videoPacketQueue.size = 0;
+    m_videoPacketQueue.pushIndex = 0;
+    m_videoPacketQueue.readIndex = 0;
+    m_videoPacketQueue.serial = 0;
+
+    m_audioFrameQueue.size = 0;
+    m_audioFrameQueue.readIndex = 0;
+    m_audioFrameQueue.pushIndex = 0;
+    m_audioFrameQueue.shown = 0;
+
+    m_videoFrameQueue.size = 0;
+    m_videoFrameQueue.readIndex = 0;
+    m_videoFrameQueue.pushIndex = 0;
+    m_videoFrameQueue.shown = 0;
+
+    m_exit = 0;
+
+    m_isSeek = 0;
+
+    m_audSeek = 0;
+    m_vidSeek = 0;
+
+    m_audioPktDecoder.serial = 0;
+    m_videoPktDecoder.serial = 0;
+}
+
+int Decoder::decode(const QString& url)
+{
+    int ret = 0;
+    //解封装初始化
+    m_fmtCtx = avformat_alloc_context();
+
+    //用于获取流时长
+    AVDictionary* formatOpts = nullptr;
+    av_dict_set(&formatOpts, "probesize", "32", 0);
+
+    ret = avformat_open_input(&m_fmtCtx, url.toUtf8().constData(), nullptr, nullptr);
+    if (ret < 0) {
+        av_strerror(ret, m_errBuf, sizeof(m_errBuf));
+        qDebug() << "avformat_open_input error:" << m_errBuf;
+        av_dict_free(&formatOpts);
+        //打开失败释放分配的AVFormatContext内存
+        avformat_free_context(m_fmtCtx);
+        return 0;
+    }
+
+    ret = avformat_find_stream_info(m_fmtCtx, nullptr);
+    if (ret < 0) {
+        av_strerror(ret, m_errBuf, sizeof(m_errBuf));
+        qDebug() << "avformat_find_stream_info error:" << m_errBuf;
+        av_dict_free(&formatOpts);
+        return 0;
+    }
+
+    //记录流时长
+    AVRational q = {1, AV_TIME_BASE};
+    m_duration = (uint32_t) (m_fmtCtx->duration * av_q2d(q));
+    av_dict_free(&formatOpts);
+    //qDebug()<<QString("duration: %1:%2").arg(m_duration/60).arg(m_duration%60)<<endl;
+
+    m_audioIndex = av_find_best_stream(m_fmtCtx, AVMEDIA_TYPE_AUDIO, -1, -1, nullptr, 0);
+    if (m_audioIndex < 0) {
+        qDebug() << "no audio stream!";
+        return 0;
+    }
+
+    m_videoIndex = av_find_best_stream(m_fmtCtx, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
+    if (m_videoIndex < 0) {
+        qDebug() << "no video stream!";
+        return 0;
+    }
+
+    //音频解码初始化
+    AVCodecParameters* audioCodecPar = m_fmtCtx->streams[m_audioIndex]->codecpar;
+    if (!audioCodecPar) {
+        qDebug() << "audio par is nullptr!";
+        return 0;
+    }
+
+    m_audioPktDecoder.codecCtx = avcodec_alloc_context3(nullptr);
+
+    ret = avcodec_parameters_to_context(m_audioPktDecoder.codecCtx, audioCodecPar);
+    if (ret < 0) {
+        av_strerror(ret, m_errBuf, sizeof(m_errBuf));
+        qDebug() << "error info_avcodec_parameters_to_context:" << m_errBuf;
+        return 0;
+    }
+
+    const AVCodec* audioCodec = avcodec_find_decoder(m_audioPktDecoder.codecCtx->codec_id);
+    if (!audioCodec) {
+        qDebug() << "avcodec_find_decoder failed!";
+        return 0;
+    }
+    m_audioPktDecoder.codecCtx->codec_id = audioCodec->id;
+
+    ret = avcodec_open2(m_audioPktDecoder.codecCtx, audioCodec, nullptr);
+    if (ret < 0) {
+        av_strerror(ret, m_errBuf, sizeof(m_errBuf));
+        qDebug() << "error info_avcodec_open2:" << m_errBuf;
+        return 0;
+    }
+
+    //视频解码初始化
+    AVCodecParameters* videoCodecPar = m_fmtCtx->streams[m_videoIndex]->codecpar;
+    if (!videoCodecPar) {
+        qDebug() << "videocodecpar is nullptr!";
+        return 0;
+    }
+
+    m_videoPktDecoder.codecCtx = avcodec_alloc_context3(nullptr);
+
+    ret = avcodec_parameters_to_context(m_videoPktDecoder.codecCtx, videoCodecPar);
+    if (ret < 0) {
+        av_strerror(ret, m_errBuf, sizeof(m_errBuf));
+        qDebug() << "error info_avcodec_parameters_to_context:" << m_errBuf;
+        return 0;
+    }
+
+    const AVCodec* videoCodec = avcodec_find_decoder(m_videoPktDecoder.codecCtx->codec_id);
+    if (!videoCodec) {
+        qDebug() << "avcodec_find_decoder failed!";
+        return 0;
+    }
+    m_videoPktDecoder.codecCtx->codec_id = videoCodec->id;
+
+    ret = avcodec_open2(m_videoPktDecoder.codecCtx, videoCodec, nullptr);
+    if (ret < 0) {
+        av_strerror(ret, m_errBuf, sizeof(m_errBuf));
+        qDebug() << "error info_avcodec_open2:" << m_errBuf;
+        return 0;
+    }
+
+    //记录视频帧率
+    m_vidFrameRate = av_guess_frame_rate(m_fmtCtx, m_fmtCtx->streams[m_videoIndex], nullptr);
+
+    setInitVal();
+
+    ThreadPool::addTask(std::bind(&Decoder::demux, this, std::placeholders::_1),
+                        std::make_shared<int>(1));
+    ThreadPool::addTask(std::bind(&Decoder::audioDecode, this, std::placeholders::_1),
+                        std::make_shared<int>(2));
+    ThreadPool::addTask(std::bind(&Decoder::videoDecode, this, std::placeholders::_1),
+                        std::make_shared<int>(3));
+
+    return 1;
+}
+
// Stop decoding: flag the worker tasks to exit, wait briefly for them to wind
// down, then drain the queues and free the FFmpeg contexts.
void Decoder::exit()
{
    m_exit = 1;
    // NOTE(review): a fixed 200 ms sleep is assumed to be long enough for the
    // demux/decode tasks to observe m_exit and stop touching the queues —
    // confirm; joining the tasks (or a condition variable) would be
    // deterministic.
    QThread::msleep(200);
    clearQueueCache();
    if (m_fmtCtx) {
        avformat_close_input(&m_fmtCtx);
        m_fmtCtx = nullptr;
    }
    if (m_audioPktDecoder.codecCtx) {
        avcodec_free_context(&m_audioPktDecoder.codecCtx);
        m_audioPktDecoder.codecCtx = nullptr;
    }
    if (m_videoPktDecoder.codecCtx) {
        avcodec_free_context(&m_videoPktDecoder.codecCtx);
        m_videoPktDecoder.codecCtx = nullptr;
    }
}
+
+// Drop every queued packet and frame: packet queues first (both locks
+// held together), then the frame queues.
+void Decoder::clearQueueCache()
+{
+    std::lock_guard<std::mutex> lockAP(m_audioPacketQueue.mutex);
+    std::lock_guard<std::mutex> lockVP(m_videoPacketQueue.mutex);
+
+    while (m_audioPacketQueue.size) {
+        av_packet_unref(&m_audioPacketQueue.pktVec[m_audioPacketQueue.readIndex].pkt);
+        m_audioPacketQueue.readIndex = (m_audioPacketQueue.readIndex + 1) % m_maxPacketQueueSize;
+        m_audioPacketQueue.size--;
+    }
+
+    while (m_videoPacketQueue.size) {
+        av_packet_unref(&m_videoPacketQueue.pktVec[m_videoPacketQueue.readIndex].pkt);
+        m_videoPacketQueue.readIndex = (m_videoPacketQueue.readIndex + 1) % m_maxPacketQueueSize;
+        m_videoPacketQueue.size--;
+    }
+
+    std::lock_guard<std::mutex> lockAF(m_audioFrameQueue.mutex);
+    std::lock_guard<std::mutex> lockVF(m_videoFrameQueue.mutex);
+
+    while (m_audioFrameQueue.size) {
+        av_frame_unref(&m_audioFrameQueue.frameVec[m_audioFrameQueue.readIndex].frame);
+        m_audioFrameQueue.readIndex = (m_audioFrameQueue.readIndex + 1) % m_maxFrameQueueSize;
+        m_audioFrameQueue.size--;
+    }
+
+    while (m_videoFrameQueue.size) {
+        av_frame_unref(&m_videoFrameQueue.frameVec[m_videoFrameQueue.readIndex].frame);
+        m_videoFrameQueue.readIndex = (m_videoFrameQueue.readIndex + 1) % m_maxFrameQueueSize;
+        m_videoFrameQueue.size--;
+    }
+}
+
+// Drop all packets in `queue` and bump its serial so decoders can detect
+// the discontinuity (see getPacket) after a seek.
+void Decoder::packetQueueFlush(PacketQueue* queue)
+{
+    std::lock_guard<std::mutex> lockAP(queue->mutex);
+
+    while (queue->size) {
+        av_packet_unref(&queue->pktVec[queue->readIndex].pkt);
+        queue->readIndex = (queue->readIndex + 1) % m_maxPacketQueueSize;
+        queue->size--;
+    }
+    queue->serial++;
+}
+
+// Request an asynchronous seek. `target` is the absolute position in
+// seconds (clamped at 0), `seekRel` the offset from the current position.
+// Serviced by the demux thread on its next iteration.
+void Decoder::seekTo(int32_t target, int32_t seekRel)
+{
+    // Ignore seek requests while the previous one is still in progress.
+    if (m_isSeek == 1)
+        return;
+    if (target < 0)
+        target = 0;
+    m_seekTarget = target;
+    m_seekRel = seekRel;
+    m_isSeek = 1;
+}
+
+// Demux thread body: reads packets from the input and routes them to the
+// audio/video packet queues; also services pending seek requests.
+// `par` is the thread-pool task payload (unused).
+void Decoder::demux(std::shared_ptr<void> par)
+{
+    int ret = -1;
+    AVPacket* pkt = av_packet_alloc();
+    while (1) {
+        if (m_exit) {
+            break;
+        }
+        // Throttle while either packet queue is full.
+        if (m_audioPacketQueue.size >= m_maxPacketQueueSize
+            || m_videoPacketQueue.size >= m_maxPacketQueueSize) {
+            std::this_thread::sleep_for(std::chrono::milliseconds(50));
+            continue;
+        }
+        if (m_isSeek) {
+            //AVRational sq={1,AV_TIME_BASE};
+            //int64_t seekMin = m_seekRel > 0 ? m_seekTarget-m_seekRel+2 : INT64_MIN;
+            //int64_t seekMax = m_seekRel < 0 ? m_seekTarget-m_seekRel-2 : INT64_MAX;
+            //qDebug()<<"seekMin:"<<seekMin<<" seekMax:"<<seekMax<<" seekTarget:"<<m_seekTarget<<endl;
+            //ret=avformat_seek_file(m_fmtCtx,m_audioIndex,seekMin,m_seekTarget,seekMax,AVSEEK_FLAG_BACKWARD);
+            int64_t seekTarget = m_seekTarget * AV_TIME_BASE;
+            ret = av_seek_frame(m_fmtCtx, -1, seekTarget, AVSEEK_FLAG_BACKWARD);
+            if (ret < 0) {
+                av_strerror(ret, m_errBuf, sizeof(m_errBuf));
+                qDebug() << "avformat_seek_file error:" << m_errBuf;
+            } else {
+                // Seek succeeded: drop queued packets and raise the
+                // "waiting for target frame" flags for both decoders.
+                packetQueueFlush(&m_audioPacketQueue);
+                packetQueueFlush(&m_videoPacketQueue);
+                m_audSeek = 1;
+                m_vidSeek = 1;
+            }
+            m_isSeek = 0;
+        }
+
+        ret = av_read_frame(m_fmtCtx, pkt);
+        if (ret != 0) {
+            av_packet_free(&pkt);
+            av_strerror(ret, m_errBuf, sizeof(m_errBuf));
+            qDebug() << "av_read_frame error:" << m_errBuf;
+            break;
+        }
+        if (pkt->stream_index == m_audioIndex) {
+            // Push onto the audio packet queue.
+            //qDebug()<<pkt->pts*av_q2d(m_fmtCtx->streams[m_audioIndex]->time_base)<<endl;
+            pushPacket(&m_audioPacketQueue, pkt);
+        } else if (pkt->stream_index == m_videoIndex) {
+            // Push onto the video packet queue.
+            pushPacket(&m_videoPacketQueue, pkt);
+            //av_packet_unref(pkt);
+        } else {
+            av_packet_unref(pkt);
+        }
+    }
+    av_packet_free(&pkt); // safe if already freed above: pkt is then nullptr
+    // Only drain-and-exit here when the loop ended at end-of-file; a forced
+    // exit (m_exit set) performs this cleanup elsewhere.
+    if (!m_exit) {
+        while (m_audioFrameQueue.size)
+            QThread::msleep(50);
+        exit();
+    }
+    //qDebug() << "demuxthread exit";
+}
+
+// Audio decode thread body: drains the audio packet queue, decodes, and
+// pushes resulting frames onto the audio frame queue. After a seek, frames
+// before the seek target are discarded until the target pts is reached.
+void Decoder::audioDecode(std::shared_ptr<void> par)
+{
+    AVPacket* pkt = av_packet_alloc();
+    AVFrame* frame = av_frame_alloc();
+    while (1) {
+        if (m_exit) {
+            break;
+        }
+        // Throttle: cap the audio frame queue length.
+        if (m_audioFrameQueue.size >= m_maxFrameQueueSize) {
+            std::this_thread::sleep_for(std::chrono::milliseconds(10));
+            continue;
+        }
+        // Fetch one packet from the audio packet queue.
+        int ret = getPacket(&m_audioPacketQueue, pkt, &m_audioPktDecoder);
+        if (ret) {
+            ret = avcodec_send_packet(m_audioPktDecoder.codecCtx, pkt);
+            av_packet_unref(pkt);
+            if (ret < 0) {
+                av_strerror(ret, m_errBuf, sizeof(m_errBuf));
+                qDebug() << "avcodec_send_packet error:" << m_errBuf;
+                continue;
+            }
+            while (1) {
+                ret = avcodec_receive_frame(m_audioPktDecoder.codecCtx, frame);
+                if (ret == 0) {
+                    if (m_audSeek) {
+                        // NOTE(review): the cast binds to frame->pts only, so this
+                        // computes ((int)pts) * time_base — can overflow int for
+                        // large pts values; verify intended precedence.
+                        int pts = (int) frame->pts
+                                  * av_q2d(m_fmtCtx->streams[m_audioIndex]->time_base);
+                        //qDebug()<<"audFrame pts:"<<pts<<endl;
+                        if (pts < m_seekTarget) {
+                            av_frame_unref(frame);
+                            continue;
+                        } else {
+                            m_audSeek = 0;
+                        }
+                    }
+                    // Push onto the pending-playback frame queue.
+                    pushAFrame(frame);
+                } else {
+                    break;
+                }
+            }
+        } else {
+            //qDebug() << "audio packetQueue is empty for decoding!";
+            std::this_thread::sleep_for(std::chrono::milliseconds(20));
+        }
+    }
+    av_packet_free(&pkt);
+    av_frame_free(&frame);
+    //qDebug() << "audioDecode exit";
+}
+
+// Video decode thread body: mirrors audioDecode for the video stream.
+void Decoder::videoDecode(std::shared_ptr<void> par)
+{
+    AVPacket* pkt = av_packet_alloc();
+    AVFrame* frame = av_frame_alloc();
+    while (1) {
+        if (m_exit) {
+            break;
+        }
+        if (m_videoFrameQueue.size >= m_maxFrameQueueSize) { // throttle: cap video frame queue length
+            std::this_thread::sleep_for(std::chrono::milliseconds(10));
+            continue;
+        }
+        int ret = getPacket(&m_videoPacketQueue, pkt, &m_videoPktDecoder);
+        if (ret) {
+            ret = avcodec_send_packet(m_videoPktDecoder.codecCtx, pkt);
+            av_packet_unref(pkt);
+            // NOTE(review): AVERROR(EAGAIN)/AVERROR_EOF are negative, so the
+            // extra comparisons are redundant with `ret < 0`.
+            if (ret < 0 || ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
+                av_strerror(ret, m_errBuf, sizeof(m_errBuf));
+                qDebug() << "avcodec_send_packet error:" << m_errBuf;
+                continue;
+            }
+            while (1) {
+                ret = avcodec_receive_frame(m_videoPktDecoder.codecCtx, frame);
+                if (ret == 0) {
+                    if (m_vidSeek) {
+                        // NOTE(review): same cast-precedence caveat as audioDecode.
+                        int pts = (int) frame->pts
+                                  * av_q2d(m_fmtCtx->streams[m_videoIndex]->time_base);
+                        if (pts < m_seekTarget) {
+                            av_frame_unref(frame);
+                            continue;
+                        } else {
+                            m_vidSeek = 0;
+                        }
+                    }
+                    //AVRational vidRational=av_guess_frame_rate(m_fmtCtx,
+                    //              m_fmtCtx->streams[m_videoIndex],frame);
+                    // Push onto the pending-playback frame queue.
+                    pushVFrame(frame);
+                } else {
+                    break;
+                }
+            }
+        } else {
+            //qDebug() << "video packetQueue is empty for decoding!";
+            std::this_thread::sleep_for(std::chrono::milliseconds(20));
+        }
+    }
+    av_packet_free(&pkt);
+    av_frame_free(&frame);
+    //qDebug() << "videoDecode exit";
+}
+
+// Pop one packet from `queue` into `pkt`.
+// Returns 1 on success; 0 when the queue stayed empty past the 100ms wait,
+// on shutdown, or when a serial discontinuity (seek) was detected.
+int Decoder::getPacket(PacketQueue* queue, AVPacket* pkt, PktDecoder* decoder)
+{
+    std::unique_lock<std::mutex> lock(queue->mutex);
+    while (!queue->size) {
+        bool ret = queue->cond.wait_for(lock, std::chrono::milliseconds(100), [&]() {
+            return queue->size && !m_exit;
+        });
+        if (!ret)
+            return 0;
+    }
+    if (queue->serial != decoder->serial) {
+        // A serial mismatch means a seek happened: drop this read position
+        // and flush the decoder's internal buffers.
+        avcodec_flush_buffers(decoder->codecCtx);
+        decoder->serial = queue->pktVec[queue->readIndex].serial;
+        return 0;
+    }
+    av_packet_move_ref(pkt, &queue->pktVec[queue->readIndex].pkt);
+    decoder->serial = queue->pktVec[queue->readIndex].serial;
+    queue->readIndex = (queue->readIndex + 1) % m_maxPacketQueueSize;
+    queue->size--;
+    return 1;
+}
+
+// Move `pkt` into the tail slot of `queue`, tagging it with the queue's
+// current serial so decoders can detect seek discontinuities.
+// Caller guarantees the queue is not full (demux() throttles on size).
+// Fix: removed a leftover debug statement (`if (queue->serial == 1)
+// qDebug() << "";`) that had no effect.
+void Decoder::pushPacket(PacketQueue* queue, AVPacket* pkt)
+{
+    std::lock_guard<std::mutex> lock(queue->mutex);
+    av_packet_move_ref(&queue->pktVec[queue->pushIndex].pkt, pkt);
+    queue->pktVec[queue->pushIndex].serial = queue->serial;
+    queue->pushIndex = (queue->pushIndex + 1) % m_maxPacketQueueSize;
+    queue->size++;
+}
+
+// Move a decoded audio frame into the playback queue, tagging it with the
+// audio decoder's current serial. Callers throttle on queue size first.
+void Decoder::pushAFrame(AVFrame* frame)
+{
+    std::lock_guard<std::mutex> lock(m_audioFrameQueue.mutex);
+    av_frame_move_ref(&m_audioFrameQueue.frameVec[m_audioFrameQueue.pushIndex].frame, frame);
+    m_audioFrameQueue.frameVec[m_audioFrameQueue.pushIndex].serial = m_audioPktDecoder.serial;
+    m_audioFrameQueue.pushIndex = (m_audioFrameQueue.pushIndex + 1) % m_maxFrameQueueSize;
+    m_audioFrameQueue.size++;
+}
+
+// Pop the next audio frame into `frame`.
+// Returns 1 on success; 0 for a null argument, a 100ms timeout/shutdown,
+// or when the head frame belongs to a stale (pre-seek) serial and is
+// dropped instead of returned.
+int Decoder::getAFrame(AVFrame* frame)
+{
+    if (!frame)
+        return 0;
+    std::unique_lock<std::mutex> lock(m_audioFrameQueue.mutex);
+    while (!m_audioFrameQueue.size) {
+        bool ret = m_audioFrameQueue.cond.wait_for(lock, std::chrono::milliseconds(100), [&]() {
+            return !m_exit && m_audioFrameQueue.size;
+        });
+        if (!ret)
+            return 0;
+    }
+    // Discard frames decoded before the most recent seek.
+    if (m_audioFrameQueue.frameVec[m_audioFrameQueue.readIndex].serial
+        != m_audioPacketQueue.serial) {
+        av_frame_unref(&m_audioFrameQueue.frameVec[m_audioFrameQueue.readIndex].frame);
+        m_audioFrameQueue.readIndex = (m_audioFrameQueue.readIndex + 1) % m_maxFrameQueueSize;
+        m_audioFrameQueue.size--;
+        return 0;
+    }
+    av_frame_move_ref(frame, &m_audioFrameQueue.frameVec[m_audioFrameQueue.readIndex].frame);
+    m_audioFrameQueue.readIndex = (m_audioFrameQueue.readIndex + 1) % m_maxFrameQueueSize;
+    m_audioFrameQueue.size--;
+    return 1;
+}
+
+// Move a decoded video frame into the playback queue, recording its serial,
+// per-frame duration (den/num of the guessed frame rate, i.e. 1/fps in
+// seconds) and presentation timestamp in seconds.
+void Decoder::pushVFrame(AVFrame* frame)
+{
+    std::lock_guard<std::mutex> lock(m_videoFrameQueue.mutex);
+    m_videoFrameQueue.frameVec[m_videoFrameQueue.pushIndex].serial = m_videoPktDecoder.serial;
+    // Bug fix: guard both denominator AND numerator (was `den && den`),
+    // otherwise a zero numerator slipped through into av_q2d's division.
+    m_videoFrameQueue.frameVec[m_videoFrameQueue.pushIndex].duration
+        = (m_vidFrameRate.den && m_vidFrameRate.num)
+              ? av_q2d(AVRational{m_vidFrameRate.den, m_vidFrameRate.num})
+              : 0.00;
+    m_videoFrameQueue.frameVec[m_videoFrameQueue.pushIndex].pts
+        = frame->pts * av_q2d(m_fmtCtx->streams[m_videoIndex]->time_base);
+    av_frame_move_ref(&m_videoFrameQueue.frameVec[m_videoFrameQueue.pushIndex].frame, frame);
+    m_videoFrameQueue.pushIndex = (m_videoFrameQueue.pushIndex + 1) % m_maxFrameQueueSize;
+    m_videoFrameQueue.size++;
+    //qDebug()<<"RemainingVFrame:"<<m_videoFrameQueue.size-m_videoFrameQueue.shown;
+}
+
+// Return the frame at the read index — once `shown` is set this is the
+// frame currently on screen. No locking here; NOTE(review): assumes the
+// video playback thread is the only consumer — confirm.
+Decoder::MyFrame* Decoder::peekLastVFrame()
+{
+    Decoder::MyFrame* frame = &m_videoFrameQueue.frameVec[m_videoFrameQueue.readIndex];
+    return frame;
+}
+
+// Wait (in 100ms slices) until at least one video frame is queued, then
+// return a pointer to the frame due to be displayed next.
+// Returns nullptr on timeout or shutdown.
+Decoder::MyFrame* Decoder::peekVFrame()
+{
+    while (!m_videoFrameQueue.size) {
+        std::unique_lock<std::mutex> lock(m_videoFrameQueue.mutex);
+        bool ret = m_videoFrameQueue.cond.wait_for(lock, std::chrono::milliseconds(100), [&]() {
+            return !m_exit && m_videoFrameQueue.size;
+        });
+        if (!ret)
+            return nullptr;
+    }
+    int index = (m_videoFrameQueue.readIndex + m_videoFrameQueue.shown) % m_maxFrameQueueSize;
+    Decoder::MyFrame* frame = &m_videoFrameQueue.frameVec[index];
+    return frame;
+}
+
+// Like peekVFrame but returns the frame after the one due to be displayed;
+// waits until at least two frames are queued. Returns nullptr on
+// timeout/shutdown.
+// NOTE(review): the wait predicate is satisfied by a single queued frame,
+// so with exactly one frame available this loop can spin without blocking —
+// consider `size >= 2` in the predicate.
+Decoder::MyFrame* Decoder::peekNextVFrame()
+{
+    while (m_videoFrameQueue.size < 2) {
+        std::unique_lock<std::mutex> lock(m_videoFrameQueue.mutex);
+        bool ret = m_videoFrameQueue.cond.wait_for(lock, std::chrono::milliseconds(100), [&]() {
+            return !m_exit && m_videoFrameQueue.size;
+        });
+        if (!ret)
+            return nullptr;
+    }
+    int index = (m_videoFrameQueue.readIndex + m_videoFrameQueue.shown + 1) % m_maxFrameQueueSize;
+    Decoder::MyFrame* frame = &m_videoFrameQueue.frameVec[index];
+    return frame;
+}
+
+// Consume the frame just displayed: the first call only marks the head
+// frame as shown; subsequent calls release the previous frame and advance
+// the read index.
+void Decoder::setNextVFrame()
+{
+    std::unique_lock<std::mutex> lock(m_videoFrameQueue.mutex);
+    if (!m_videoFrameQueue.size)
+        return;
+    if (!m_videoFrameQueue.shown) {
+        m_videoFrameQueue.shown = 1;
+        return;
+    }
+    av_frame_unref(&m_videoFrameQueue.frameVec[m_videoFrameQueue.readIndex].frame);
+    m_videoFrameQueue.readIndex = (m_videoFrameQueue.readIndex + 1) % m_maxFrameQueueSize;
+    m_videoFrameQueue.size--;
+}
+
+// Number of queued video frames not yet displayed (size minus the shown
+// marker). NOTE(review): reads queue fields without the mutex — verify
+// callers tolerate momentarily stale values.
+int Decoder::getRemainingVFrame()
+{
+    if (!m_videoFrameQueue.size)
+        return 0;
+    return m_videoFrameQueue.size - m_videoFrameQueue.shown;
+}

+ 179 - 0
libs/AVPlayer/av_decoder.h

@@ -0,0 +1,179 @@
+#ifndef AV_DECODER_H
+#define AV_DECODER_H
+
+#include <QImage>
+#include <QVector>
+#include <condition_variable>
+#include <mutex>
+#include <atomic>
+
+extern "C" {
+}
+#include "ffmpeg_compat.h"
+
+QT_BEGIN_NAMESPACE
+namespace AVTool {
+// Lightweight media summary produced by Decoder::detectMediaInfo().
+typedef struct MediaInfo
+{
+    int duration;  // media duration (seconds, matching m_duration's convention — verify)
+    QImage tipImg; // preview image (name-based inference; verify against detectMediaInfo)
+} MediaInfo;
+
+// Demuxes a media URL and decodes its audio/video streams on thread-pool
+// workers, exposing ring-buffer queues of packets and frames plus
+// seek/serial bookkeeping consumed by the player.
+class Decoder
+{
+public:
+    // A decoded frame plus the metadata the player needs for A/V sync.
+    typedef struct MyFrame
+    {
+        AVFrame frame;
+        int serial;      // packet-queue serial the frame was decoded under
+        double duration; // nominal display duration in seconds (1/fps)
+        double pts;      // presentation time in seconds
+    } MyFrame;
+
+private:
+    typedef struct MyPacket
+    {
+        AVPacket pkt;
+        int serial; // queue serial at push time; changes after each seek
+    } MyPacket;
+
+    // Fixed-capacity ring buffer of packets, guarded by `mutex`.
+    typedef struct PacketQueue
+    {
+        QVector<MyPacket> pktVec;
+        int readIndex;
+        int pushIndex;
+        int size;
+        int serial; // incremented on every flush (i.e. on each seek)
+        std::mutex mutex;
+        std::condition_variable cond;
+    } PacketQueue;
+
+    // Fixed-capacity ring buffer of frames, guarded by `mutex`.
+    typedef struct FrameQueue
+    {
+        QVector<MyFrame> frameVec;
+        int readIndex;
+        int pushIndex;
+        int shown; // 1 once the head frame has been displayed
+        int size;
+        std::mutex mutex;
+        std::condition_variable cond;
+    } FrameQueue;
+
+    // A codec context plus the serial of the packets it is consuming.
+    typedef struct PktDecoder
+    {
+        AVCodecContext* codecCtx;
+        int serial;
+    } PktDecoder;
+
+public:
+    Decoder();
+    ~Decoder();
+
+    // Returns a MediaInfo instance on successful probing, nullptr otherwise.
+    AVTool::MediaInfo* detectMediaInfo(const QString& url);
+
+    void seekTo(int32_t target, int32_t seekRel);
+
+    int getAFrame(AVFrame* frame);
+
+    int getRemainingVFrame();
+    // Peek the previous frame (the one currently displayed).
+    Decoder::MyFrame* peekLastVFrame();
+    // Peek the frame about to be displayed.
+    Decoder::MyFrame* peekVFrame();
+    // Peek the frame after the one about to be displayed.
+    Decoder::MyFrame* peekNextVFrame();
+    // Advance the read index by one position.
+    void setNextVFrame();
+
+    inline int vidPktSerial() const { return m_videoPacketQueue.serial; }
+
+    inline int audioIndex() const { return m_audioIndex; }
+
+    inline int videoIndex() const { return m_videoIndex; }
+
+    inline AVFormatContext* formatContext() const { return m_fmtCtx; }
+
+    inline AVCodecParameters* audioCodecPar() const
+    {
+        return m_fmtCtx->streams[m_audioIndex]->codecpar;
+    }
+
+    inline AVCodecParameters* videoCodecPar() const
+    {
+        return m_fmtCtx->streams[m_videoIndex]->codecpar;
+    }
+
+    inline uint32_t avDuration() { return m_duration; }
+
+    inline int isExit() { return m_exit; }
+
+    int decode(const QString& url);
+
+    void exit();
+
+private:
+    PacketQueue m_audioPacketQueue;
+    PacketQueue m_videoPacketQueue;
+
+    FrameQueue m_audioFrameQueue;
+    FrameQueue m_videoFrameQueue;
+
+    PktDecoder m_audioPktDecoder;
+    PktDecoder m_videoPktDecoder;
+
+    AVFormatContext* m_fmtCtx;
+
+    const int m_maxFrameQueueSize;
+    const int m_maxPacketQueueSize;
+
+    AVRational m_vidFrameRate;
+
+    int m_audioIndex;
+    int m_videoIndex;
+
+    std::atomic<int> m_exit;
+
+    // whether a seek request is pending
+    int m_isSeek;
+
+    // flags: waiting for the seek-target frame after a seek
+    int m_vidSeek;
+    int m_audSeek;
+
+    // seek offset relative to the current position
+    int64_t m_seekRel;
+
+    // absolute seek target
+    int64_t m_seekTarget;
+
+    // total stream duration in seconds
+    uint32_t m_duration;
+
+    char m_errBuf[100];
+
+private:
+    bool init();
+
+    void setInitVal();
+
+    void packetQueueFlush(PacketQueue* queue);
+
+    void clearQueueCache();
+
+    void demux(std::shared_ptr<void> par);
+
+    void audioDecode(std::shared_ptr<void> par);
+    void videoDecode(std::shared_ptr<void> par);
+
+    int getPacket(PacketQueue* queue, AVPacket* pkt, PktDecoder* decoder);
+    void pushPacket(PacketQueue* queue, AVPacket* pkt);
+
+    void pushAFrame(AVFrame* frame);
+
+    void pushVFrame(AVFrame* frame);
+};
+} // namespace AVTool
+
+#endif // AV_DECODER_H

+ 484 - 0
libs/AVPlayer/av_player.cpp

@@ -0,0 +1,484 @@
+#include "av_player.h"
+#include <QDebug>
+#include <QImage>
+#include <QThread>
+#include "threadpool.h"
+#include "vframe.h"
+
+//lower bound of the A/V sync threshold
+#define AV_SYNC_THRESHOLD_MIN 0.04
+//upper bound of the A/V sync threshold
+#define AV_SYNC_THRESHOLD_MAX 0.1
+//upper bound on a single video frame's duration, used to adapt sync at low
+//frame rates: when the frame rate is very low an early frame should not
+//simply get a doubled delay; treat it specially. Cap set at 10 fps.
+#define AV_SYNC_FRAMEDUP_THRESHOLD 0.1
+//beyond this much drift, synchronizing is pointless and is abandoned
+#define AV_NOSYNC_THRESHOLD 10.0
+
+#define AV_SYNC_REJUDGESHOLD 0.01
+
+// Construct an idle player. The decoder and the reusable audio frame are
+// allocated up front; resample/scale contexts and buffers are created
+// lazily when playback starts.
+AVPlayer::AVPlayer()
+    : m_decoder(new Decoder)
+    , m_audioFrame(av_frame_alloc())
+    , m_imageWidth(300)
+    , m_imageHeight(300)
+    , m_swrCtx(nullptr)
+    , m_swsCtx(nullptr)
+    , m_buffer(nullptr)
+    , m_audioBuf(nullptr)
+    , m_duration(0)
+    , m_volume(30)
+    , m_exit(0)
+    , m_pause(0)
+    , m_playSpeed(1.0)
+{}
+
+// Stop any active playback and free every owned FFmpeg resource.
+// swr/sws contexts may already have been released by clearPlayer();
+// the null checks make the double release safe.
+AVPlayer::~AVPlayer()
+{
+    av_frame_free(&m_audioFrame);
+    clearPlayer();
+    delete m_decoder;
+    if (m_swrCtx)
+        swr_free(&m_swrCtx);
+    if (m_swsCtx)
+        sws_freeContext(m_swsCtx);
+    if (m_audioBuf)
+        av_free(m_audioBuf);
+    if (m_buffer)
+        av_free(m_buffer);
+}
+
+// Start (or restart) playback of `url`.
+// Returns 1 on success; 0 when decoding setup or SDL audio init fails.
+int AVPlayer::play(const QString& url)
+{
+    clearPlayer();
+    if (!m_decoder->decode(url)) {
+        qDebug() << "decode failed";
+        return 0;
+    }
+
+    // Stream duration is available once decoding has been set up.
+    m_duration = m_decoder->avDuration();
+    emit AVDurationChanged(m_duration);
+
+    m_pause = 0;
+    m_clockInitFlag = -1;
+
+    if (!initSDL()) {
+        qDebug() << "init sdl failed!";
+        return 0;
+    }
+    initVideo();
+    return 1;
+}
+
+// SDL audio callback: fill `stream` with `len` bytes of mixed PCM.
+// Pulls decoded frames from the decoder, resampling (swr) and
+// time-stretching (sonic, for non-1x speed) as needed, mixes at the
+// current volume, updates the audio clock that video sync is slaved to,
+// and emits AVPtsChanged on whole-second boundaries.
+void fillAStreamCallback(void* userdata, uint8_t* stream, int len)
+{
+    memset(stream, 0, len);
+    AVPlayer* is = (AVPlayer*) userdata;
+    // NOTE(review): function-local static is shared by all AVPlayer
+    // instances — fine for a single player, verify if several can coexist.
+    static double audioPts = 0.00;
+    while (len > 0) {
+        if (is->m_exit)
+            return;
+        if (is->m_audioBufIndex >= is->m_audioBufSize) { /* buffer consumed: refill */
+            int ret = is->m_decoder->getAFrame(is->m_audioFrame);
+            if (ret) {
+                is->m_audioBufIndex = 0;
+                // Create the resampler lazily when the frame's format
+                // differs from the SDL output format.
+                if ((is->m_targetSampleFmt != is->m_audioFrame->format
+                     || is->m_targetChannelLayout != (int64_t)ffmpeg_get_frame_channel_layout(is->m_audioFrame)
+                     || is->m_targetFreq != is->m_audioFrame->sample_rate
+                     || is->m_targetNbSamples != is->m_audioFrame->nb_samples)
+                    && !is->m_swrCtx) {
+                    is->m_swrCtx = ffmpeg_swr_alloc_set_opts(nullptr,
+                                                      is->m_targetChannelLayout,
+                                                      is->m_targetSampleFmt,
+                                                      is->m_targetFreq,
+                                                      (int64_t)ffmpeg_get_frame_channel_layout(is->m_audioFrame),
+                                                      (enum AVSampleFormat) is->m_audioFrame->format,
+                                                      is->m_audioFrame->sample_rate,
+                                                      0,
+                                                      nullptr);
+                    if (!is->m_swrCtx || swr_init(is->m_swrCtx) < 0) {
+                        qDebug() << "swr_init failed";
+                        return;
+                    }
+                }
+                if (is->m_swrCtx) {
+                    const uint8_t** in = (const uint8_t**) is->m_audioFrame->extended_data;
+                    // Extra 256 samples of headroom on the output count.
+                    int out_count = (uint64_t) is->m_audioFrame->nb_samples * is->m_targetFreq
+                                        / is->m_audioFrame->sample_rate
+                                    + 256;
+                    int out_size = ffmpeg_get_buffer_size(is->m_targetSampleFmt,
+                                                              is->m_targetChannels,
+                                                              out_count,
+                                                              0);
+                    if (out_size < 0) {
+                        qDebug() << "av_samples_get_buffer_size failed";
+                        return;
+                    }
+                    av_fast_malloc(&is->m_audioBuf, &is->m_audioBufSize, out_size);
+                    if (!is->m_audioBuf) {
+                        qDebug() << "av_fast_malloc failed";
+                        return;
+                    }
+                    int len2 = swr_convert(is->m_swrCtx,
+                                           &is->m_audioBuf,
+                                           out_count,
+                                           in,
+                                           is->m_audioFrame->nb_samples);
+                    if (len2 < 0) {
+                        qDebug() << "swr_convert failed";
+                        return;
+                    }
+                    // Non-1x speed: route the samples through sonic.
+                    if (is->m_playSpeed != 1.0) {
+                        sonicSetSpeed(is->m_sonicStream, is->m_playSpeed);
+                        int ret = sonicWriteShortToStream(is->m_sonicStream,
+                                                          (short*) is->m_audioBuf,
+                                                          len2);
+                        int availSamples = sonicSamplesAvailable(is->m_sonicStream);
+                        if (!availSamples) {
+                            // Nothing stretched out yet: mark buffer
+                            // consumed and fetch the next frame.
+                            is->m_audioBufSize = is->m_audioBufIndex;
+                            continue;
+                        }
+                        int numSamples = availSamples;
+                        int bytes = numSamples * is->m_targetChannels
+                                    * av_get_bytes_per_sample(is->m_targetSampleFmt);
+                        if (bytes > out_size) {
+                            av_fast_malloc(&is->m_audioBuf, &is->m_audioBufSize, bytes);
+                        }
+                        len2 = sonicReadShortFromStream(is->m_sonicStream,
+                                                        (short*) is->m_audioBuf,
+                                                        numSamples);
+                    }
+                    is->m_audioBufSize = len2 * is->m_targetChannels
+                                         * av_get_bytes_per_sample(is->m_targetSampleFmt);
+                } else {
+                    // Formats already match: copy the frame data through.
+                    is->m_audioBufSize = ffmpeg_get_buffer_size(is->m_targetSampleFmt,
+                                                                    is->m_targetChannels,
+                                                                    is->m_audioFrame->nb_samples,
+                                                                    0);
+                    av_fast_malloc(&is->m_audioBuf, &is->m_audioBufSize, is->m_audioBufSize + 256);
+                    if (!is->m_audioBuf) {
+                        qDebug() << "av_fast_malloc failed";
+                        return;
+                    }
+                    memcpy(is->m_audioBuf, is->m_audioFrame->data[0], is->m_audioBufSize);
+                }
+                audioPts = is->m_audioFrame->pts
+                           * av_q2d(is->m_fmtCtx->streams[is->m_audioIndex]->time_base);
+                //qDebug()<<is->m_audioPts;
+                av_frame_unref(is->m_audioFrame);
+            } else {
+                // Check whether playback really reached the end of file.
+                if (is->m_decoder->isExit()) {
+                    emit is->AVTerminate();
+                }
+                return;
+            }
+        }
+        int len1 = is->m_audioBufSize - is->m_audioBufIndex;
+        len1 = (len1 > len ? len : len1);
+        SDL_MixAudio(stream, is->m_audioBuf + is->m_audioBufIndex, len1, is->m_volume);
+        len -= len1;
+        is->m_audioBufIndex += len1;
+        stream += len1;
+    }
+    // Update the audio clock.
+    is->m_audioClock.setClock(audioPts);
+    // Emit the pts-changed signal only when the whole-second value changes:
+    // progress is shown in integer seconds, and this avoids flooding the
+    // signal/slot connection.
+    uint32_t _pts = (uint32_t) audioPts;
+    if (is->m_lastAudPts != _pts) {
+        emit is->AVPtsChanged(_pts);
+        is->m_lastAudPts = _pts;
+    }
+}
+
+// Initialize SDL audio: open the output device matched to the source
+// stream's channel count and sample rate with S16 output, create the sonic
+// time-stretch stream, and unpause the device. Returns 1 on success.
+int AVPlayer::initSDL()
+{
+    if (SDL_Init(SDL_INIT_AUDIO) != 0) {
+        qDebug() << "SDL_Init failed";
+        return 0;
+    }
+
+    m_exit = 0;
+
+    m_audioBufSize = 0;
+    m_audioBufIndex = 0;
+
+    m_lastAudPts = -1;
+
+    m_audioCodecPar = m_decoder->audioCodecPar();
+    m_audioIndex = m_decoder->audioIndex();
+    m_fmtCtx = m_decoder->formatContext();
+
+    SDL_AudioSpec wanted_spec;
+    // wanted_spec.channels = m_audioCodecPar->channels;
+    wanted_spec.channels = ffmpeg_get_codec_channels(m_fmtCtx->streams[m_audioIndex]);
+    wanted_spec.freq = m_audioCodecPar->sample_rate;
+    wanted_spec.format = AUDIO_S16SYS;
+    wanted_spec.silence = 0;
+    wanted_spec.callback = fillAStreamCallback;
+    wanted_spec.userdata = this;
+    wanted_spec.samples = m_audioCodecPar->frame_size;
+
+    if (SDL_OpenAudio(&wanted_spec, nullptr) < 0) {
+        qDebug() << "SDL_OpenAudio failed";
+        return 0;
+    }
+    m_targetSampleFmt = AV_SAMPLE_FMT_S16;
+    // m_targetChannels = m_audioCodecPar->channels;
+    m_targetChannels = ffmpeg_get_codec_channels(m_fmtCtx->streams[m_audioIndex]);
+    m_targetFreq = m_audioCodecPar->sample_rate;
+    m_targetChannelLayout = (int64_t)ffmpeg_get_default_channel_layout(m_targetChannels);
+    m_targetNbSamples = m_audioCodecPar->frame_size;
+    // NOTE(review): the next two assignments duplicate the ones above.
+    m_audioIndex = m_decoder->audioIndex();
+    m_fmtCtx = m_decoder->formatContext();
+
+    // NOTE(review): sonicCreateStream's result is not checked for nullptr.
+    m_sonicStream = sonicCreateStream(m_targetFreq, m_targetChannels);
+    sonicSetQuality(m_sonicStream, 1);
+
+    SDL_PauseAudio(0);
+    return 1;
+}
+
+// Prepare video output: capture source dimensions, allocate the buffer that
+// converted frames are written into, and enqueue the playback loop on the
+// thread pool. Always returns 1.
+int AVPlayer::initVideo()
+{
+    m_frameTimer = 0.00;
+
+    m_videoCodecPar = m_decoder->videoCodecPar();
+    m_videoIndex = m_decoder->videoIndex();
+
+    m_imageWidth = m_videoCodecPar->width;
+    m_imageHeight = m_videoCodecPar->height;
+
+    m_dstPixFmt = AV_PIX_FMT_YUV422P;
+    m_swsFlags = SWS_BICUBIC;
+
+    // Allocate the buffer that holds converted frame data.
+    int bufSize = av_image_get_buffer_size(m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
+    m_buffer = (uint8_t*) av_realloc(m_buffer, bufSize * sizeof(uint8_t));
+    av_image_fill_arrays(m_pixels, m_pitch, m_buffer, m_dstPixFmt, m_imageWidth, m_imageHeight, 1);
+    // Queue the video playback callback as a thread-pool task.
+    if (!ThreadPool::addTask(std::bind(&AVPlayer::videoCallback, this, std::placeholders::_1),
+                             std::make_shared<int>(0))) {
+        qDebug() << "videoCallback add task failed!";
+    }
+    return 1;
+}
+
+// Pause or resume audio playback. On pause the wall-clock time is recorded;
+// on resume the frame timer is advanced by the paused duration so video
+// pacing stays aligned. No-op when the audio device is stopped.
+void AVPlayer::pause(bool isPause)
+{
+    if (SDL_GetAudioStatus() == SDL_AUDIO_STOPPED)
+        return;
+    if (isPause) {
+        if (SDL_GetAudioStatus() == SDL_AUDIO_PLAYING) {
+            SDL_PauseAudio(1);
+            m_pauseTime = av_gettime_relative() / 1000000.0;
+            m_pause = 1;
+        }
+    } else {
+        if (SDL_GetAudioStatus() == SDL_AUDIO_PAUSED) {
+            SDL_PauseAudio(0);
+            m_frameTimer += av_gettime_relative() / 1000000.0 - m_pauseTime;
+            m_pause = 0;
+        }
+    }
+}
+
+// Stop playback and release per-session resources (decoder threads, SDL
+// audio device, resample/scale contexts, sonic stream).
+// No-op when playback is already stopped.
+void AVPlayer::clearPlayer()
+{
+    if (playState() != AV_STOPPED) {
+        m_exit = 1;
+        if (playState() == AV_PLAYING)
+            SDL_PauseAudio(1);
+        m_decoder->exit();
+        SDL_CloseAudio();
+        if (m_swrCtx)
+            swr_free(&m_swrCtx);
+        if (m_swsCtx)
+            sws_freeContext(m_swsCtx);
+        m_swrCtx = nullptr;
+        m_swsCtx = nullptr;
+        sonicDestroyStream(m_sonicStream);
+    }
+}
+
+// Probe `url` without starting playback; forwards to the decoder.
+// Returns nullptr on failure (see Decoder::detectMediaInfo).
+AVTool::MediaInfo* AVPlayer::detectMediaInfo(const QString& url)
+{
+    return m_decoder->detectMediaInfo(url);
+}
+
+// Map SDL's audio device status onto the player's play state.
+// Returns AV_STOPPED for a stopped device or any unexpected status.
+AVPlayer::PlayState AVPlayer::playState()
+{
+    // Bug fix: initialize to a defined value — previously the `default:`
+    // branch returned an uninitialized `state` (undefined behavior).
+    AVPlayer::PlayState state = AVPlayer::AV_STOPPED;
+    switch (SDL_GetAudioStatus()) {
+    case SDL_AUDIO_PLAYING:
+        state = AVPlayer::AV_PLAYING;
+        break;
+    case SDL_AUDIO_PAUSED:
+        state = AVPlayer::AV_PAUSED;
+        break;
+    case SDL_AUDIO_STOPPED:
+        state = AVPlayer::AV_STOPPED;
+        break;
+    default:
+        break;
+    }
+    return state;
+}
+
+// Zero both A/V clocks and mark the clock state initialized.
+void AVPlayer::initAVClock()
+{
+    m_audioClock.setClock(0.00);
+    m_videoClock.setClock(0.00);
+    m_clockInitFlag = 1;
+}
+
+// Convert (when needed) and publish one decoded frame via frameChanged,
+// then update the video clock from the frame's pts.
+void AVPlayer::displayImage(AVFrame* frame)
+{
+    if (frame) {
+        // Create the scaler lazily, only when the source format/size
+        // differs from the output format/size.
+        if ((m_videoCodecPar->width != m_imageWidth || m_videoCodecPar->height != m_imageHeight
+             || m_videoCodecPar->format != m_dstPixFmt)
+            && !m_swsCtx) {
+            m_swsCtx = sws_getCachedContext(m_swsCtx,
+                                            frame->width,
+                                            frame->height,
+                                            (enum AVPixelFormat) frame->format,
+                                            m_imageWidth,
+                                            m_imageHeight,
+                                            m_dstPixFmt,
+                                            m_swsFlags,
+                                            nullptr,
+                                            nullptr,
+                                            nullptr);
+        }
+        if (m_swsCtx) {
+            sws_scale(m_swsCtx, frame->data, frame->linesize, 0, frame->height, m_pixels, m_pitch);
+            uint8_t* planes[4] = { m_pixels[0], m_pixels[1], m_pixels[2], m_pixels[3] };
+            int lines[4] = { m_pitch[0], m_pitch[1], m_pitch[2], m_pitch[3] };
+            emit frameChanged(QSharedPointer<VideoFrame>::create(m_dstPixFmt,
+                                                                 m_imageWidth,
+                                                                 m_imageHeight,
+                                                                 planes,
+                                                                 lines));
+        } else {
+            // Use the decoded frame's plane data directly.
+            emit frameChanged(QSharedPointer<VideoFrame>::create((AVPixelFormat)m_videoCodecPar->format,
+                                                                 m_imageWidth,
+                                                                 m_imageHeight,
+                                                                 frame->data,
+                                                                 frame->linesize));
+        }
+        // Update the video clock.
+        m_videoClock.setClock(frame->pts * av_q2d(m_fmtCtx->streams[m_videoIndex]->time_base));
+    }
+}
+
+// Video playback loop (runs as a thread-pool task): paces frame display
+// against the audio clock, delaying or dropping frames as needed, and
+// hands due frames to displayImage().
+// NOTE(review): peekVFrame/peekNextVFrame can return nullptr on shutdown;
+// the pointers are dereferenced unchecked here — verify.
+void AVPlayer::videoCallback(std::shared_ptr<void> par)
+{
+    double time = 0.00;
+    double duration = 0.00;
+    double delay = 0.00;
+    if (m_clockInitFlag == -1) {
+        initAVClock();
+    }
+    do {
+        if (m_exit)
+            break;
+        if (m_pause) {
+            std::this_thread::sleep_for(std::chrono::milliseconds(50));
+            continue;
+        }
+        if (m_decoder->getRemainingVFrame()) {
+            MyFrame* lastFrame = m_decoder->peekLastVFrame();
+            MyFrame* frame = m_decoder->peekVFrame();
+
+            //qDebug()<<"video pts:"<<frame->pts;
+
+            // Frame from a stale serial (pre-seek): skip it.
+            if (frame->serial != m_decoder->vidPktSerial()) {
+                m_decoder->setNextVFrame();
+                continue;
+            }
+
+            // First frame of a new serial (after a seek): restart the timer.
+            if (frame->serial != lastFrame->serial)
+                m_frameTimer = av_gettime_relative() / 1000000.0;
+
+            duration = vpDuration(lastFrame, frame);
+            delay = computeTargetDelay(duration);
+
+            time = av_gettime_relative() / 1000000.0;
+
+            //qDebug()<<"delay:"<<delay<<endl;
+
+            // Current frame's display window has not elapsed yet.
+            if (time < m_frameTimer + delay) {
+                QThread::msleep(
+                    (uint32_t) (FFMIN(AV_SYNC_REJUDGESHOLD, m_frameTimer + delay - time) * 1000));
+                continue;
+            }
+
+            m_frameTimer += delay;
+            if (time - m_frameTimer > AV_SYNC_THRESHOLD_MAX)
+                m_frameTimer = time;
+
+            // With more than one undisplayed frame queued, run the drop
+            // check. Skipped for non-1x speed (and frame stepping) since
+            // fast playback would drop too many frames and stutter.
+            if (m_playSpeed == 1.0 && m_decoder->getRemainingVFrame() > 1) {
+                MyFrame* nextFrame = m_decoder->peekNextVFrame();
+                duration = nextFrame->pts - frame->pts;
+                // If the master clock is already past this frame's whole
+                // display window, drop it immediately.
+                if (time > m_frameTimer + duration) {
+                    m_decoder->setNextVFrame();
+                    qDebug() << "abandon vframe" << Qt::endl;
+                    continue;
+                }
+            }
+
+            displayImage(&frame->frame);
+            // Advance the read index.
+            m_decoder->setNextVFrame();
+        } else {
+            QThread::msleep(10);
+        }
+    } while (true);
+    //qDebug()<<"videoCallBack exit"<<endl;
+}
+
+// Compute the actual delay before showing the next video frame, given the
+// nominal frame duration `delay`, by comparing video and audio clocks.
+double AVPlayer::computeTargetDelay(double delay)
+{
+    // pts gap between the displayed video frame and the playing audio.
+    double diff = m_videoClock.getClock() - m_audioClock.getClock();
+
+    // Sync threshold clamped to [AV_SYNC_THRESHOLD_MIN, AV_SYNC_THRESHOLD_MAX].
+    double sync = FFMAX(AV_SYNC_THRESHOLD_MIN, FFMIN(AV_SYNC_THRESHOLD_MAX, delay));
+
+    // When drift exceeds the no-sync threshold, give up synchronizing.
+    if (!isnan(diff) && fabs(diff) < AV_NOSYNC_THRESHOLD) {
+        if (diff
+            <= -sync) { // video lags audio by more than a frame: shrink the delay
+                        // (possibly to 0) so this frame catches up immediately
+            delay = FFMAX(0, diff + delay);
+        } else if (diff >= sync
+                   && delay
+                          > AV_SYNC_FRAMEDUP_THRESHOLD) { // low-fps video ahead of audio: extend by the drift so the next check lands at the next frame's pts
+            delay = diff + delay;
+        } else if (diff >= sync) { // high-fps video ahead: simply double the delay
+            delay = 2 * delay;
+        }
+    }
+    return delay;
+}
+
+// Time (seconds) between two consecutive queued frames.
+// Frames from different serials (across a seek) have no meaningful gap, so
+// report 0; NaN or implausibly large gaps fall back to the previous frame's
+// nominal duration.
+double AVPlayer::vpDuration(MyFrame* lastFrame, MyFrame* curFrame)
+{
+    if (curFrame->serial != lastFrame->serial)
+        return 0.00;
+    const double gap = curFrame->pts - lastFrame->pts;
+    if (isnan(gap) || gap > AV_NOSYNC_THRESHOLD)
+        return lastFrame->duration;
+    return gap;
+}

+ 162 - 0
libs/AVPlayer/av_player.h

@@ -0,0 +1,162 @@
+#ifndef AV_PLAYER_H
+#define AV_PLAYER_H
+
+#include <QObject>
+#include <QSharedPointer>
+#include <QSize>
+#include <atomic>
+#include "av_clock.h"
+#include "av_decoder.h"
+#include "sonic.h"
+
+extern "C" {
+#include <SDL.h>
+}
+#include "ffmpeg_compat.h"
+
+using AVTool::Decoder;
+
+class VideoFrame;
+
+typedef Decoder::MyFrame MyFrame;
+
+class AVPlayer : public QObject
+{
+    Q_OBJECT
+
+    friend void fillAStreamCallback(void* userdata, uint8_t* stream, int len);
+
+public:
+    enum PlayState { AV_STOPPED, AV_PLAYING, AV_PAUSED };
+
+    AVPlayer();
+    ~AVPlayer();
+
+    int play(const QString& url);
+
+    void pause(bool isPause);
+
+    void clearPlayer();
+
+    AVTool::MediaInfo* detectMediaInfo(const QString& url);
+
+    AVPlayer::PlayState playState();
+
+    void setVFrameSize(const QSize& size)
+    {
+        m_imageWidth = size.width();
+        m_imageHeight = size.height();
+    }
+
+    inline void setVolume(int volumePer)
+    {
+        m_volume = (volumePer * SDL_MIX_MAXVOLUME / 100) % (SDL_MIX_MAXVOLUME + 1);
+    }
+
+    inline void seekBy(int32_t time_s) { seekTo((int32_t) m_audioClock.getClock() + time_s); }
+
+    inline void seekTo(int32_t time_s)
+    {
+        if (playState() == AV_STOPPED)
+            return;
+        if (time_s < 0)
+            time_s = 0;
+        m_decoder->seekTo(time_s, time_s - (int32_t) m_audioClock.getClock());
+    }
+
+    inline uint32_t avDuration() { return m_duration; }
+
+    inline void setSpeed(float speed) { m_playSpeed = speed; }
+
+signals:
+    void frameChanged(QSharedPointer<VideoFrame> frame);
+    void AVTerminate();
+    void AVPtsChanged(unsigned int pts);
+    void AVDurationChanged(unsigned int duration);
+
+private:
+    int initSDL();
+    int initVideo();
+    void videoCallback(std::shared_ptr<void> par);
+    double computeTargetDelay(double delay);
+    double vpDuration(MyFrame* lastFrame, MyFrame* curFrame);
+    void displayImage(AVFrame* frame);
+    void initAVClock();
+
+private:
+    //解码器实例
+    Decoder* m_decoder;
+    AVFormatContext* m_fmtCtx;
+
+    AVCodecParameters* m_audioCodecPar;
+    SwrContext* m_swrCtx;
+    uint8_t* m_audioBuf;
+
+    sonicStream m_sonicStream;
+
+    uint32_t m_audioBufSize;
+    uint32_t m_audioBufIndex;
+    uint32_t m_duration;
+
+    uint32_t m_lastAudPts;
+
+    enum AVSampleFormat m_targetSampleFmt;
+
+    //记录音视频帧最新播放帧的时间戳,用于同步
+    //    double m_audioPts;
+    //    double m_videoPts;
+
+    double m_frameTimer;
+
+    std::atomic<float> m_playSpeed;
+
+    //延时时间
+    double m_delay;
+
+    //音频播放时钟
+    AVClock m_audioClock;
+
+    //视频播放时钟
+    AVClock m_videoClock;
+
+    int m_targetChannels;
+    int m_targetFreq;
+    int m_targetChannelLayout;
+    int m_targetNbSamples;
+
+    int m_volume;
+
+    //同步时钟初始化标志,音视频异步线程
+    //谁先读到初始标志位就由谁初始化时钟
+    int m_clockInitFlag;
+
+    int m_audioIndex;
+    int m_videoIndex;
+
+    int m_imageWidth;
+    int m_imageHeight;
+
+    //终止标志
+    std::atomic<int> m_exit;
+
+    //记录暂停前的时间
+    double m_pauseTime;
+
+    //暂停标志
+    std::atomic<int> m_pause;
+
+    AVFrame* m_audioFrame;
+
+    AVCodecParameters* m_videoCodecPar;
+
+    enum AVPixelFormat m_dstPixFmt;
+    int m_swsFlags;
+    SwsContext* m_swsCtx;
+
+    uint8_t* m_buffer;
+
+    uint8_t* m_pixels[4];
+    int m_pitch[4];
+};
+
+#endif

+ 950 - 0
libs/AVPlayer/avopenglwidget.cpp

@@ -0,0 +1,950 @@
+#include "avopenglwidget.h"
+#include <QDebug>
+#include <QFont>
+#include <QFontMetrics>
+#include <QPainter>
+#include <QTimer>
+#include <climits>
+#include <new>
+
// Construct the render widget with all effects disabled and the
// "no video" tip animation running.
AVOpenGLWidget::AVOpenGLWidget(QWidget* parent)
    : QOpenGLWidget(parent)
    , m_program(nullptr)
    , m_textureId(0)
    , m_frameData(nullptr)
    , m_frameWidth(0)
    , m_frameHeight(0)
    , m_frameFormat(0)
    , m_frameUpdated(false)
    , m_initialized(false)
    , m_keepAspectRatio(true)
    , m_gray(false)
    , m_threshold(false)
    , m_thresholdValue(0.5f)
    , m_blur(false)
    , m_blurRadius(1.0f)
    , m_reverse(false)
    , m_colorReduce(false)
    , m_colorReduceLevel(0)
    , m_gamma(false)
    , m_gammaValue(1.0f)
    , m_contrastBright(false)
    , m_contrast(1.0f)
    , m_brightness(0.0f)
    , m_mirror(false)
    , m_noVideoTip(QStringLiteral("视频未开始"))
    , m_tipTexture(0)
    , m_tipAngle(0.0f)
{
    // setAttribute(Qt::WA_NoSystemBackground, true);
    // setAttribute(Qt::WA_OpaquePaintEvent, true);
    // setAutoFillBackground(false);
    // Vertex positions: a triangle strip covering the whole viewport.
    m_vertices[0] = -1.0f;
    m_vertices[1] = -1.0f;
    m_vertices[2] = 1.0f;
    m_vertices[3] = -1.0f;
    m_vertices[4] = -1.0f;
    m_vertices[5] = 1.0f;
    m_vertices[6] = 1.0f;
    m_vertices[7] = 1.0f;

    // Texture coordinates (V flipped relative to GL's bottom-left
    // origin, so row 0 of the image buffer appears at the top).
    m_texCoords[0] = 0.0f;
    m_texCoords[1] = 1.0f;
    m_texCoords[2] = 1.0f;
    m_texCoords[3] = 1.0f;
    m_texCoords[4] = 0.0f;
    m_texCoords[5] = 0.0f;
    m_texCoords[6] = 1.0f;
    m_texCoords[7] = 0.0f;

    // Timer that advances the rotating 3D "no video" tip and repaints.
    m_tipTimer = new QTimer(this);
    connect(m_tipTimer, &QTimer::timeout, this, [this]() {
        m_tipAngle += 2.0f;
        if (m_tipAngle > 360.0f)
            m_tipAngle -= 360.0f;
        update();
    });
    m_tipTimer->start(30);
    // Reset frame/tip state so nothing stale is shown before the
    // first frame arrives (tip texture is generated lazily in paintGL).
    m_frameData = nullptr;
    m_frameUpdated = false;
    m_tipImage = QImage();
    m_tipTexture = 0;
}
+
// Release GL objects and the CPU-side frame buffer.
AVOpenGLWidget::~AVOpenGLWidget()
{
    Close();
}
+
+bool AVOpenGLWidget::Open(unsigned int width, unsigned int height)
+{
+    QMutexLocker locker(&m_mutex);
+
+    m_frameWidth = width;
+    m_frameHeight = height;
+
+    // 如果已经有数据,释放它
+    if (m_frameData) {
+        delete[] m_frameData;
+    }
+
+    // 分配新的内存
+    m_frameData = new unsigned char[width * height * 4]; // RGBA格式
+    memset(m_frameData, 0, width * height * 4);
+
+    return true;
+}
+
// Destroy the GL texture and shader program (with the context made
// current), then free the CPU-side frame buffer and reset state.
void AVOpenGLWidget::Close()
{
    makeCurrent();
    if (m_textureId) {
        glDeleteTextures(1, &m_textureId);
        m_textureId = 0;
    }
    if (m_program) {
        delete m_program;
        m_program = nullptr;
    }
    doneCurrent();

    // Free the frame data (mutex taken only for the CPU-side state,
    // after the GL teardown above).
    QMutexLocker locker(&m_mutex);
    if (m_frameData) {
        delete[] m_frameData;
        m_frameData = nullptr;
    }

    m_frameWidth = 0;
    m_frameHeight = 0;
    m_frameUpdated = false;
    m_initialized = false;
}
+
// One-time GL setup: build the effect shader program and create the
// video texture. Called by Qt with the context current (and manually
// from Render() — see the note there).
void AVOpenGLWidget::initializeGL()
{
    initializeOpenGLFunctions();
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);

    // Build a single shader program supporting all toggleable effects
    // (gray / threshold / blur / invert / posterize / gamma /
    // contrast-brightness / mirror), each gated by a uniform.
    if (m_program) {
        delete m_program;
        m_program = nullptr;
    }
    m_program = new QOpenGLShaderProgram();
    m_program->addShaderFromSourceCode(QOpenGLShader::Vertex,
                                       "attribute vec2 vertexIn;\n"
                                       "attribute vec2 textureIn;\n"
                                       "varying vec2 textureOut;\n"
                                       "void main(void)\n"
                                       "{\n"
                                       "    gl_Position = vec4(vertexIn, 0.0, 1.0);\n"
                                       "    textureOut = textureIn;\n"
                                       "}\n");
    m_program->addShaderFromSourceCode(QOpenGLShader::Fragment,
                                       R"Raw(
        varying vec2 textureOut;
        uniform sampler2D texture;
        uniform vec2 uTextureSize;
        uniform bool uGray;
        uniform bool uThreshold;
        uniform float uThresholdValue;
        uniform bool uBlur;
        uniform float uBlurRadius;
        uniform bool uReverse;
        uniform bool uColorReduce;
        uniform int uColorReduceLevel;
        uniform bool uGamma;
        uniform float uGammaValue;
        uniform bool uContrastBright;
        uniform float uContrast;
        uniform float uBrightness;
        uniform bool uMirror;
        void main(void)
        {
            vec2 uv = textureOut;
            if (uMirror) {
                uv.x = 1.0 - uv.x;
            }
            vec4 color = texture2D(texture, uv);
            // 灰度
            if (uGray) {
                float gray = dot(color.rgb, vec3(0.299, 0.587, 0.114));
                color = vec4(gray, gray, gray, color.a);
            }
            // 二值化
            if (uThreshold) {
                float v = dot(color.rgb, vec3(0.299, 0.587, 0.114));
                float th = v > uThresholdValue ? 1.0 : 0.0;
                color = vec4(th, th, th, color.a);
            }
            // 简单3x3均值模糊
            if (uBlur) {
                vec2 tex_offset = vec2(1.0) / uTextureSize;
                vec4 sum = vec4(0.0);
                for (int dx = -1; dx <= 1; ++dx)
                for (int dy = -1; dy <= 1; ++dy)
                    sum += texture2D(texture, uv + vec2(dx, dy) * tex_offset * uBlurRadius);
                color = sum / 9.0;
            }
            // 反色
            if (uReverse) {
                color.rgb = vec3(1.0) - color.rgb;
            }
            // 色彩减少
            if (uColorReduce) {
                color.rgb = floor(color.rgb * float(uColorReduceLevel)) / float(uColorReduceLevel);
            }
            // 伽马
            if (uGamma) {
                color.rgb = pow(color.rgb, vec3(1.0 / uGammaValue));
            }
            // 对比度/亮度
            if (uContrastBright) {
                color.rgb = color.rgb * uContrast + uBrightness;
            }
            gl_FragColor = color;
        }
        )Raw");
    m_program->bindAttributeLocation("vertexIn", 0);
    m_program->bindAttributeLocation("textureIn", 1);
    m_program->link();

    // Create the texture that receives the decoded video frames.
    glGenTextures(1, &m_textureId);
    glBindTexture(GL_TEXTURE_2D, m_textureId);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glBindTexture(GL_TEXTURE_2D, 0);

    m_initialized = true;
}
+
+void AVOpenGLWidget::resizeGL(int width, int height)
+{
+    glViewport(0, 0, width, height);
+}
+
// Render m_noVideoTip into a QImage and upload it as a GL texture.
// NOTE(review): issues raw GL calls; callers outside paintGL() (e.g.
// setNoVideoTip/showEndTip) do not make the context current first —
// confirm a context is current on every call path.
void AVOpenGLWidget::updateTipTexture()
{
    QFont font;
    font.setPointSize(48);
    font.setBold(true);
    QFontMetrics fm(font);
    int w = fm.horizontalAdvance(m_noVideoTip) + 40;
    int h = fm.height() + 40;
    QImage img(w, h, QImage::Format_ARGB32_Premultiplied);
    img.fill(Qt::transparent);
    QPainter p(&img);
    p.setFont(font);
    p.setPen(Qt::white);
    p.setRenderHint(QPainter::Antialiasing);
    p.drawText(img.rect(), Qt::AlignCenter, m_noVideoTip);
    p.end();
    m_tipImage = img;
    // Re-create the texture; ARGB32 data is uploaded as GL_BGRA.
    if (m_tipTexture) {
        glDeleteTextures(1, &m_tipTexture);
    }
    glGenTextures(1, &m_tipTexture);
    glBindTexture(GL_TEXTURE_2D, m_tipTexture);
    glTexImage2D(GL_TEXTURE_2D,
                 0,
                 GL_RGBA,
                 img.width(),
                 img.height(),
                 0,
                 GL_BGRA,
                 GL_UNSIGNED_BYTE,
                 img.bits());
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glBindTexture(GL_TEXTURE_2D, 0);
}
+
// Draw the tip texture as a textured quad rotating around the Y axis.
// NOTE(review): uses fixed-function GL (glMatrixMode/glBegin/glOrtho),
// which requires a compatibility-profile context — confirm the surface
// format used by the application.
void AVOpenGLWidget::drawNoVideoTip3D()
{
    glClearColor(0, 0, 0, 1);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, m_tipTexture);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    // Orthographic projection matching the widget's aspect ratio.
    float aspect = float(width()) / float(height());
    glOrtho(-aspect, aspect, -1, 1, -10, 10);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glTranslatef(0, 0, 0);
    glRotatef(m_tipAngle, 0, 1, 0);
    // Quad half-size in normalized coordinates, derived from the tip
    // image size relative to the widget size.
    float w = float(m_tipImage.width()) / width();
    float h = float(m_tipImage.height()) / height();
    glBegin(GL_QUADS);
    glTexCoord2f(0, 1);
    glVertex3f(-w, -h, 0);
    glTexCoord2f(1, 1);
    glVertex3f(w, -h, 0);
    glTexCoord2f(1, 0);
    glVertex3f(w, h, 0);
    glTexCoord2f(0, 0);
    glVertex3f(-w, h, 0);
    glEnd();
    glBindTexture(GL_TEXTURE_2D, 0);
    glDisable(GL_TEXTURE_2D);
    glDisable(GL_DEPTH_TEST);
}
+
// Paint the current frame through the effect shader, or the rotating
// "no video" tip when no frame is available.
void AVOpenGLWidget::paintGL()
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    QMutexLocker locker(&m_mutex);
    if (!m_frameData || m_frameWidth <= 0 || m_frameHeight <= 0 || !m_frameUpdated) {
        // No valid frame: lazily build the tip texture and draw it.
        if (m_tipTexture == 0)
            updateTipTexture();
        drawNoVideoTip3D();
        return;
    }
    // Upload the latest RGBA frame into the video texture.
    glBindTexture(GL_TEXTURE_2D, m_textureId);
    glTexImage2D(GL_TEXTURE_2D,
                 0,
                 GL_RGBA,
                 m_frameWidth,
                 m_frameHeight,
                 0,
                 GL_RGBA,
                 GL_UNSIGNED_BYTE,
                 m_frameData);
    // Push all effect toggles/parameters to the shader.
    m_program->bind();
    m_program->setUniformValue("texture", 0);
    m_program->setUniformValue("uTextureSize", QVector2D(m_frameWidth, m_frameHeight));
    m_program->setUniformValue("uGray", m_gray);
    m_program->setUniformValue("uThreshold", m_threshold);
    m_program->setUniformValue("uThresholdValue", m_thresholdValue);
    m_program->setUniformValue("uBlur", m_blur);
    m_program->setUniformValue("uBlurRadius", m_blurRadius);
    m_program->setUniformValue("uReverse", m_reverse);
    m_program->setUniformValue("uColorReduce", m_colorReduce);
    m_program->setUniformValue("uColorReduceLevel", m_colorReduceLevel);
    m_program->setUniformValue("uGamma", m_gamma);
    m_program->setUniformValue("uGammaValue", m_gammaValue);
    m_program->setUniformValue("uContrastBright", m_contrastBright);
    m_program->setUniformValue("uContrast", m_contrast);
    m_program->setUniformValue("uBrightness", m_brightness);
    m_program->setUniformValue("uMirror", m_mirror);
    m_program->enableAttributeArray(0);
    m_program->enableAttributeArray(1);
    m_program->setAttributeArray(0, m_vertices, 2);
    m_program->setAttributeArray(1, m_texCoords, 2);
    // Letterbox the viewport to preserve the video aspect ratio.
    if (m_keepAspectRatio) {
        QSize widgetSize = size();
        double widgetRatio = double(widgetSize.width()) / widgetSize.height();
        double videoRatio = double(m_frameWidth) / m_frameHeight;
        int x = 0, y = 0, w = widgetSize.width(), h = widgetSize.height();
        if (widgetRatio > videoRatio) {
            w = int(h * videoRatio);
            x = (widgetSize.width() - w) / 2;
        } else {
            h = int(w / videoRatio);
            y = (widgetSize.height() - h) / 2;
        }
        glViewport(x, y, w, h);
    } else {
        glViewport(0, 0, width(), height());
    }
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    m_program->disableAttributeArray(0);
    m_program->disableAttributeArray(1);
    m_program->release();
    glBindTexture(GL_TEXTURE_2D, 0);
}
+
+void AVOpenGLWidget::updateFrame(const AVOpenGLWidget::RGBAFrame& frame)
+{
+    if (!frame.data || frame.width <= 0 || frame.height <= 0)
+        return;
+
+    QMutexLocker locker(&m_mutex);
+
+    // 如果尺寸变化,重新分配内存
+    if (m_frameWidth != frame.width || m_frameHeight != frame.height) {
+        if (m_frameData) {
+            delete[] m_frameData;
+            m_frameData = nullptr;
+        }
+
+        m_frameWidth = frame.width;
+        m_frameHeight = frame.height;
+
+        // 检查内存分配大小是否合理
+        size_t dataSize = static_cast<size_t>(m_frameWidth) * m_frameHeight * 4;
+        if (dataSize > 0 && dataSize < SIZE_MAX / 4) {
+            try {
+                m_frameData = new unsigned char[dataSize]; // RGBA格式
+            } catch (const std::bad_alloc&) {
+                m_frameData = nullptr;
+                return;
+            }
+        } else {
+            return;
+        }
+    }
+
+    // 确保m_frameData已正确分配
+    if (!m_frameData) {
+        return;
+    }
+
+    // 复制帧数据
+    memcpy(m_frameData, frame.data, m_frameWidth * m_frameHeight * 4);
+    m_frameUpdated = true;
+
+    // 请求重绘
+    update();
+}
+
// Convert a decoded AVFrame into the widget's internal RGBA buffer.
// Supported formats: RGBA, RGB24, BGRA/BGR0, BGR24, YUV420P, NV12;
// returns false for anything else or on invalid input. Does not take
// ownership of `frame`.
bool AVOpenGLWidget::convertFromAVFrame(AVFrame* frame)
{
    if (!frame || frame->width <= 0 || frame->height <= 0 || !frame->data[0])
        return false;

    QMutexLocker locker(&m_mutex);

    // Reallocate the RGBA buffer when the frame size changes.
    if (m_frameWidth != frame->width || m_frameHeight != frame->height) {
        if (m_frameData) {
            delete[] m_frameData;
            m_frameData = nullptr;
        }

        m_frameWidth = frame->width;
        m_frameHeight = frame->height;

        // Sanity-check the allocation size.
        size_t dataSize = static_cast<size_t>(m_frameWidth) * m_frameHeight * 4;
        if (dataSize > 0 && dataSize < SIZE_MAX / 4) {
            try {
                m_frameData = new unsigned char[dataSize]; // RGBA
            } catch (const std::bad_alloc&) {
                m_frameData = nullptr;
                return false;
            }
        } else {
            return false;
        }
    }

    // Ensure the buffer exists even when the size did not change.
    if (!m_frameData) {
        // Same size as before but buffer missing: reallocate it.
        size_t dataSize = static_cast<size_t>(m_frameWidth) * m_frameHeight * 4;
        if (dataSize > 0 && dataSize < SIZE_MAX / 4) {
            try {
                m_frameData = new unsigned char[dataSize]; // RGBA
            } catch (const std::bad_alloc&) {
                m_frameData = nullptr;
                return false;
            }
        } else {
            return false;
        }
    }

    // Validate the source plane and stride before reading.
    if (!frame->data[0] || frame->linesize[0] <= 0) {
        qDebug() << "Invalid frame data or linesize";
        return false;
    }

    // Convert according to the source pixel format.
    switch (frame->format) {
    case AV_PIX_FMT_RGBA: {
        // Straight row-by-row copy of RGBA data.
        for (int y = 0; y < frame->height; y++) {
            memcpy(m_frameData + y * m_frameWidth * 4,
                   frame->data[0] + y * frame->linesize[0],
                   frame->width * 4);
        }
    } break;
    case AV_PIX_FMT_RGB24: {
        // RGB24 -> RGBA (alpha forced opaque).
        for (int y = 0; y < frame->height; y++) {
            uint8_t* src = frame->data[0] + y * frame->linesize[0];
            uint8_t* dst = m_frameData + y * m_frameWidth * 4;

            for (int x = 0; x < frame->width; x++) {
                *dst++ = *src++; // R
                *dst++ = *src++; // G
                *dst++ = *src++; // B
                *dst++ = 255;    // A
            }
        }
    } break;
    case AV_PIX_FMT_BGR0:
    case AV_PIX_FMT_BGRA: {
        // BGRA -> RGBA channel swap.
        for (int y = 0; y < frame->height; y++) {
            uint8_t* src = frame->data[0] + y * frame->linesize[0];
            uint8_t* dst = m_frameData + y * m_frameWidth * 4;

            for (int x = 0; x < frame->width; x++) {
                uint8_t b = *src++;
                uint8_t g = *src++;
                uint8_t r = *src++;
                uint8_t a = *src++;

                *dst++ = r;
                *dst++ = g;
                *dst++ = b;
                *dst++ = a;
            }
        }
    } break;
    case AV_PIX_FMT_BGR24: {
        // BGR24 -> RGBA.
        for (int y = 0; y < frame->height; y++) {
            uint8_t* src = frame->data[0] + y * frame->linesize[0];
            uint8_t* dst = m_frameData + y * m_frameWidth * 4;

            for (int x = 0; x < frame->width; x++) {
                uint8_t b = *src++;
                uint8_t g = *src++;
                uint8_t r = *src++;

                *dst++ = r;   // R
                *dst++ = g;   // G
                *dst++ = b;   // B
                *dst++ = 255; // A (opaque)
            }
        }
    } break;
    case AV_PIX_FMT_YUV420P: // planar YUV with 2x2 chroma subsampling
    {
        // Validate the chroma planes.
        if (!frame->data[1] || !frame->data[2])
            return false;

        // YUV420P -> RGBA using integer BT.601 (video range) math.
        for (int y = 0; y < frame->height; y++) {
            uint8_t* dst = m_frameData + y * m_frameWidth * 4;

            for (int x = 0; x < frame->width; x++) {
                int Y = frame->data[0][y * frame->linesize[0] + x];
                int U = frame->data[1][(y / 2) * frame->linesize[1] + (x / 2)];
                int V = frame->data[2][(y / 2) * frame->linesize[2] + (x / 2)];

                // YUV -> RGB conversion.
                int C = Y - 16;
                int D = U - 128;
                int E = V - 128;

                int R = (298 * C + 409 * E + 128) >> 8;
                int G = (298 * C - 100 * D - 208 * E + 128) >> 8;
                int B = (298 * C + 516 * D + 128) >> 8;

                // Clamp RGB into [0, 255].
                R = R < 0 ? 0 : (R > 255 ? 255 : R);
                G = G < 0 ? 0 : (G > 255 ? 255 : G);
                B = B < 0 ? 0 : (B > 255 ? 255 : B);

                *dst++ = R;   // R
                *dst++ = G;   // G
                *dst++ = B;   // B
                *dst++ = 255; // A
            }
        }
    } break;
    case AV_PIX_FMT_NV12: {
        // Validate the interleaved UV plane.
        if (!frame->data[1])
            return false;

        // NV12 (Y plane + interleaved UV) -> RGBA.
        for (int y = 0; y < frame->height; y++) {
            uint8_t* dst = m_frameData + y * m_frameWidth * 4;

            for (int x = 0; x < frame->width; x++) {
                int Y = frame->data[0][y * frame->linesize[0] + x];
                int U = frame->data[1][(y / 2) * frame->linesize[1] + (x / 2) * 2];
                int V = frame->data[1][(y / 2) * frame->linesize[1] + (x / 2) * 2 + 1];

                // YUV -> RGB conversion.
                int C = Y - 16;
                int D = U - 128;
                int E = V - 128;

                int R = (298 * C + 409 * E + 128) >> 8;
                int G = (298 * C - 100 * D - 208 * E + 128) >> 8;
                int B = (298 * C + 516 * D + 128) >> 8;

                // Clamp RGB into [0, 255].
                R = R < 0 ? 0 : (R > 255 ? 255 : R);
                G = G < 0 ? 0 : (G > 255 ? 255 : G);
                B = B < 0 ? 0 : (B > 255 ? 255 : B);

                *dst++ = R;   // R
                *dst++ = G;   // G
                *dst++ = B;   // B
                *dst++ = 255; // A
            }
        }
    } break;
    default:
        // Other formats would require sws_scale; not supported here.
        qDebug() << "Unsupported pixel format:" << frame->format;
        return false;
    }

    m_frameUpdated = true;
    update();
    return true;
}
+
// Slot: consume a decoded AVFrame, convert it to RGBA and schedule a
// repaint. Takes ownership of `frame` and frees it (this slot is
// delivered via a queued connection, so the producer cannot free it).
bool AVOpenGLWidget::Render(AVFrame* frame)
{
    // Lazily run GL initialization if paintGL hasn't happened yet.
    // NOTE(review): Qt normally calls initializeGL() itself; this
    // manual call relies on m_initialized to avoid double setup —
    // confirm it is still needed.
    if (!m_initialized && isValid()) {
        makeCurrent();
        initializeGL();
        doneCurrent();
    }

    if (!frame) {
        update(); // nullptr means: just refresh the display
        return true;
    }

    bool result = convertFromAVFrame(frame);

    // Free the incoming AVFrame here to avoid a leak, since delivery
    // is asynchronous (QueuedConnection).
    av_frame_free(&frame);

    return result;
}
+
// Mark the current frame stale so the next paint shows the tip instead.
void AVOpenGLWidget::clearFrame()
{
    QMutexLocker locker(&m_mutex);
    m_frameUpdated = false;
    update();
}
+
+void AVOpenGLWidget::setGray(bool on)
+{
+    if (m_gray != on) {
+        m_gray = on;
+        update();
+    }
+}
+void AVOpenGLWidget::setThreshold(bool on, float value)
+{
+    if (m_threshold != on || m_thresholdValue != value) {
+        m_threshold = on;
+        m_thresholdValue = value;
+        update();
+    }
+}
+void AVOpenGLWidget::setBlur(bool on, float radius)
+{
+    if (m_blur != on || m_blurRadius != radius) {
+        m_blur = on;
+        m_blurRadius = radius;
+        update();
+    }
+}
+void AVOpenGLWidget::setReverse(bool on)
+{
+    if (m_reverse != on) {
+        m_reverse = on;
+        update();
+    }
+}
+void AVOpenGLWidget::setColorReduce(bool on, int level)
+{
+    if (m_colorReduce != on || m_colorReduceLevel != level) {
+        m_colorReduce = on;
+        m_colorReduceLevel = level;
+        update();
+    }
+}
+void AVOpenGLWidget::setGamma(bool on, float gamma)
+{
+    if (m_gamma != on || m_gammaValue != gamma) {
+        m_gamma = on;
+        m_gammaValue = gamma;
+        update();
+    }
+}
+void AVOpenGLWidget::setContrastBright(bool on, float contrast, float brightness)
+{
+    if (m_contrastBright != on || m_contrast != contrast || m_brightness != brightness) {
+        m_contrastBright = on;
+        m_contrast = contrast;
+        m_brightness = brightness;
+        update();
+    }
+}
+void AVOpenGLWidget::setMirror(bool on)
+{
+    if (m_mirror != on) {
+        m_mirror = on;
+        update();
+    }
+}
+
+void AVOpenGLWidget::setNoVideoTip(const QString& tip)
+{
+    if (m_noVideoTip != tip) {
+        m_noVideoTip = tip;
+        updateTipTexture();
+        update();
+    }
+}
+
// Switch to an end-of-playback tip: drop the current frame, free its
// buffer and rebuild the tip texture.
// NOTE(review): deletes/creates GL textures without makeCurrent(), and
// updateTipTexture() runs while m_mutex is held — verify the context
// and lock-ordering assumptions on all call paths.
void AVOpenGLWidget::showEndTip(const QString& tip)
{
    QMutexLocker locker(&m_mutex);
    m_noVideoTip = tip;
    if (m_tipTexture) {
        glDeleteTextures(1, &m_tipTexture);
        m_tipTexture = 0;
    }
    m_frameUpdated = false;
    if (m_frameData) {
        delete[] m_frameData;
        m_frameData = nullptr;
    }
    updateTipTexture();
    update();
}
+
// Slot: convert a VideoFrame (various YUV/RGB/gray layouts) into the
// internal RGBA buffer and request a repaint. Unsupported formats are
// silently ignored.
void AVOpenGLWidget::onShowYUV(QSharedPointer<VideoFrame> frame)
{
    if (!frame || frame->getPixelW() <= 0 || frame->getPixelH() <= 0) {
        return;
    }

    QMutexLocker locker(&m_mutex);

    uint32_t width = frame->getPixelW();
    uint32_t height = frame->getPixelH();

    // Reallocate the RGBA buffer when the frame size changes.
    if (m_frameWidth != static_cast<int>(width) || m_frameHeight != static_cast<int>(height)) {
        if (m_frameData) {
            delete[] m_frameData;
            m_frameData = nullptr;
        }

        m_frameWidth = static_cast<int>(width);
        m_frameHeight = static_cast<int>(height);

        // Sanity-check the allocation size.
        size_t dataSize = static_cast<size_t>(m_frameWidth) * m_frameHeight * 4;
        if (dataSize > 0 && dataSize < SIZE_MAX / 4) {
            try {
                m_frameData = new unsigned char[dataSize]; // RGBA
            } catch (const std::bad_alloc&) {
                m_frameData = nullptr;
                return;
            }
        } else {
            return;
        }
    }

    // Ensure the buffer exists even when the size did not change.
    if (!m_frameData) {
        size_t dataSize = static_cast<size_t>(m_frameWidth) * m_frameHeight * 4;
        if (dataSize > 0 && dataSize < SIZE_MAX / 4) {
            try {
                m_frameData = new unsigned char[dataSize]; // RGBA
            } catch (const std::bad_alloc&) {
                m_frameData = nullptr;
                return;
            }
        } else {
            return;
        }
    }

    // Plane pointers/strides: for planar formats plane 1/2 are U/V;
    // for NV12/NV21 plane 1 carries the interleaved chroma pairs.
    AVPixelFormat fmt = frame->getFormat();
    uint8_t* y = frame->getData(0);
    uint8_t* u = frame->getData(1);
    uint8_t* v = frame->getData(2);
    int lsY = frame->getLineSize(0);
    int lsU = frame->getLineSize(1);
    int lsV = frame->getLineSize(2);

    if (!y)
        return;

    auto clamp = [](int v) { return v < 0 ? 0 : (v > 255 ? 255 : v); };

    switch (fmt) {
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUVJ420P:
        // 2x2 chroma subsampling, integer BT.601 video-range math.
        if (!u || !v)
            return;
        for (int yy = 0; yy < m_frameHeight; ++yy) {
            uint8_t* dst = m_frameData + yy * m_frameWidth * 4;
            const uint8_t* srcY = y + yy * lsY;
            const uint8_t* srcU = u + (yy / 2) * lsU;
            const uint8_t* srcV = v + (yy / 2) * lsV;
            for (int xx = 0; xx < m_frameWidth; ++xx) {
                int Y = srcY[xx];
                int Uc = srcU[xx / 2];
                int Vc = srcV[xx / 2];
                int C = Y - 16, D = Uc - 128, E = Vc - 128;
                int R = (298 * C + 409 * E + 128) >> 8;
                int G = (298 * C - 100 * D - 208 * E + 128) >> 8;
                int B = (298 * C + 516 * D + 128) >> 8;
                *dst++ = clamp(R);
                *dst++ = clamp(G);
                *dst++ = clamp(B);
                *dst++ = 255;
            }
        }
        break;
    case AV_PIX_FMT_YUV422P:
        // Horizontal-only chroma subsampling.
        if (!u || !v)
            return;
        for (int yy = 0; yy < m_frameHeight; ++yy) {
            uint8_t* dst = m_frameData + yy * m_frameWidth * 4;
            const uint8_t* srcY = y + yy * lsY;
            const uint8_t* srcU = u + yy * lsU;
            const uint8_t* srcV = v + yy * lsV;
            for (int xx = 0; xx < m_frameWidth; ++xx) {
                int Y = srcY[xx];
                int Uc = srcU[xx / 2];
                int Vc = srcV[xx / 2];
                int C = Y - 16, D = Uc - 128, E = Vc - 128;
                int R = (298 * C + 409 * E + 128) >> 8;
                int G = (298 * C - 100 * D - 208 * E + 128) >> 8;
                int B = (298 * C + 516 * D + 128) >> 8;
                *dst++ = clamp(R);
                *dst++ = clamp(G);
                *dst++ = clamp(B);
                *dst++ = 255;
            }
        }
        break;
    case AV_PIX_FMT_YUV444P:
        // Full-resolution chroma.
        if (!u || !v)
            return;
        for (int yy = 0; yy < m_frameHeight; ++yy) {
            uint8_t* dst = m_frameData + yy * m_frameWidth * 4;
            const uint8_t* srcY = y + yy * lsY;
            const uint8_t* srcU = u + yy * lsU;
            const uint8_t* srcV = v + yy * lsV;
            for (int xx = 0; xx < m_frameWidth; ++xx) {
                int Y = srcY[xx];
                int Uc = srcU[xx];
                int Vc = srcV[xx];
                int C = Y - 16, D = Uc - 128, E = Vc - 128;
                int R = (298 * C + 409 * E + 128) >> 8;
                int G = (298 * C - 100 * D - 208 * E + 128) >> 8;
                int B = (298 * C + 516 * D + 128) >> 8;
                *dst++ = clamp(R);
                *dst++ = clamp(G);
                *dst++ = clamp(B);
                *dst++ = 255;
            }
        }
        break;
    case AV_PIX_FMT_NV12: // Y + UV interleaved
        if (!u)
            return;
        for (int yy = 0; yy < m_frameHeight; ++yy) {
            uint8_t* dst = m_frameData + yy * m_frameWidth * 4;
            const uint8_t* srcY = y + yy * lsY;
            const uint8_t* srcUV = u + (yy / 2) * lsU;
            for (int xx = 0; xx < m_frameWidth; ++xx) {
                int Y = srcY[xx];
                int Uc = srcUV[(xx / 2) * 2 + 0];
                int Vc = srcUV[(xx / 2) * 2 + 1];
                int C = Y - 16, D = Uc - 128, E = Vc - 128;
                int R = (298 * C + 409 * E + 128) >> 8;
                int G = (298 * C - 100 * D - 208 * E + 128) >> 8;
                int B = (298 * C + 516 * D + 128) >> 8;
                *dst++ = clamp(R);
                *dst++ = clamp(G);
                *dst++ = clamp(B);
                *dst++ = 255;
            }
        }
        break;
    case AV_PIX_FMT_NV21: // Y + VU interleaved
        if (!u)
            return;
        for (int yy = 0; yy < m_frameHeight; ++yy) {
            uint8_t* dst = m_frameData + yy * m_frameWidth * 4;
            const uint8_t* srcY = y + yy * lsY;
            const uint8_t* srcVU = u + (yy / 2) * lsU;
            for (int xx = 0; xx < m_frameWidth; ++xx) {
                int Y = srcY[xx];
                int Vc = srcVU[(xx / 2) * 2 + 0];
                int Uc = srcVU[(xx / 2) * 2 + 1];
                int C = Y - 16, D = Uc - 128, E = Vc - 128;
                int R = (298 * C + 409 * E + 128) >> 8;
                int G = (298 * C - 100 * D - 208 * E + 128) >> 8;
                int B = (298 * C + 516 * D + 128) >> 8;
                *dst++ = clamp(R);
                *dst++ = clamp(G);
                *dst++ = clamp(B);
                *dst++ = 255;
            }
        }
        break;
    case AV_PIX_FMT_GRAY8:
        // Replicate luma into R/G/B.
        for (int yy = 0; yy < m_frameHeight; ++yy) {
            uint8_t* dst = m_frameData + yy * m_frameWidth * 4;
            const uint8_t* srcY = y + yy * lsY;
            for (int xx = 0; xx < m_frameWidth; ++xx) {
                int Y = srcY[xx];
                *dst++ = Y;
                *dst++ = Y;
                *dst++ = Y;
                *dst++ = 255;
            }
        }
        break;
    case AV_PIX_FMT_RGB24:
        // RGB24 -> RGBA (alpha forced opaque).
        for (int yy = 0; yy < m_frameHeight; ++yy) {
            uint8_t* dst = m_frameData + yy * m_frameWidth * 4;
            const uint8_t* src = y + yy * lsY;
            for (int xx = 0; xx < m_frameWidth; ++xx) {
                uint8_t r = src[xx * 3 + 0];
                uint8_t g = src[xx * 3 + 1];
                uint8_t b = src[xx * 3 + 2];
                *dst++ = r;
                *dst++ = g;
                *dst++ = b;
                *dst++ = 255;
            }
        }
        break;
    case AV_PIX_FMT_RGBA:
        // Straight row-by-row copy.
        for (int yy = 0; yy < m_frameHeight; ++yy) {
            uint8_t* dst = m_frameData + yy * m_frameWidth * 4;
            const uint8_t* src = y + yy * lsY;
            memcpy(dst, src, m_frameWidth * 4);
        }
        break;
    default:
        // Unsupported format: no AVFrame* is available here to route
        // through convertFromAVFrame(), so the frame is ignored.
        return;
    }

    m_frameUpdated = true;
    update();
}

+ 117 - 0
libs/AVPlayer/avopenglwidget.h

@@ -0,0 +1,117 @@
+#pragma once
+
+#include <QImage>
+#include <QMutex>
+#include <QOpenGLFunctions>
+#include <QOpenGLShaderProgram>
+#include <QOpenGLTexture>
+#include <QOpenGLWidget>
+#include <QTimer>
+
+#include "AVPlayer/ffmpeg_compat.h"
+
+#include "AVPlayer/vframe.h"
+
// OpenGL-backed video render widget. Accepts decoded frames in several forms
// (raw RGBA buffers, AVFrame*, or the project's multi-format VideoFrame),
// stores them as RGBA pixel data and draws them in paintGL() with an optional
// post-processing effect chain (gray/threshold/blur/...).
class AVOpenGLWidget : public QOpenGLWidget, protected QOpenGLFunctions
{
    Q_OBJECT
public:
    // RGBA frame descriptor (named RGBAFrame to avoid clashing with the
    // global VideoFrame type)
    struct RGBAFrame
    {
        unsigned char* data;   // pixel buffer (not owned by this struct)
        int width;
        int height;
        int format;
    };

    explicit AVOpenGLWidget(QWidget* parent = nullptr);
    ~AVOpenGLWidget();

    // VideoRender-style open/close interface
    bool Open(unsigned int width, unsigned int height);
    void Close();

public slots:
    bool Render(AVFrame* frame);

    // Original frame-update interface
    void updateFrame(const RGBAFrame& frame);
    void clearFrame();

    // Convert directly from a decoded AVFrame
    bool convertFromAVFrame(AVFrame* frame);

    // Multi-format YUV frame support (uses the global VideoFrame)
    void onShowYUV(QSharedPointer<VideoFrame> frame);

    // Toggle letterboxing vs. stretching of the video to the widget.
    void setKeepAspectRatio(bool keep)
    {
        m_keepAspectRatio = keep;
        update();
    }
    bool keepAspectRatio() const { return m_keepAspectRatio; }

    // Effect controls (each toggles one shader effect and its parameter)
    void setGray(bool on);
    void setThreshold(bool on, float value = 0.5f);
    void setBlur(bool on, float radius = 1.0f);
    void setReverse(bool on);
    void setColorReduce(bool on, int level = 8);
    void setGamma(bool on, float gamma = 1.0f);
    void setContrastBright(bool on, float contrast = 1.0f, float brightness = 0.0f);
    void setMirror(bool on);

    // Overlay tips shown when there is no video / at end of stream.
    void setNoVideoTip(const QString& tip);
    void showEndTip(const QString& tip);

protected:
    void initializeGL() override;
    void paintGL() override;
    void resizeGL(int width, int height) override;

private:
    QOpenGLShaderProgram* m_program;
    GLuint m_textureId; // raw OpenGL texture id

    // Raw frame pixel data (converted to RGBA before upload)
    unsigned char* m_frameData;
    int m_frameWidth;
    int m_frameHeight;
    int m_frameFormat; // pixel format tag

    QMutex m_mutex;        // guards m_frameData between decode and paint
    bool m_frameUpdated;   // true when a new frame awaits texture upload
    bool m_initialized;

    // Vertex and texture coordinates
    GLfloat m_vertices[8];
    GLfloat m_texCoords[8];

    bool m_keepAspectRatio = true;

    // Effect parameters
    bool m_gray = false;
    bool m_threshold = false;
    float m_thresholdValue = 0.5f;
    bool m_blur = false;
    float m_blurRadius = 1.0f;
    bool m_reverse = false;
    bool m_colorReduce = false;
    int m_colorReduceLevel = 8;
    bool m_gamma = false;
    float m_gammaValue = 1.0f;
    bool m_contrastBright = false;
    float m_contrast = 1.0f;
    float m_brightness = 0.0f;

    bool m_mirror = false;

    // "No video" overlay state (text rendered to a texture, rotated by a timer)
    QString m_noVideoTip;
    QImage m_tipImage;
    GLuint m_tipTexture;
    float m_tipAngle;
    QTimer* m_tipTimer;
    void updateTipTexture();
    void drawNoVideoTip3D();
};

+ 213 - 0
libs/AVPlayer/avplayerwidget.cpp

@@ -0,0 +1,213 @@
+#include "avplayerwidget.h"
+
+#include "avopenglwidget.h"
+
+#include "av_player.h"
+#include "vframe.h"
+
// Builds the player UI, wires signals, and seeds defaults (volume 50,
// a sample RTMP URL in the address box).
AVPlayerWidget::AVPlayerWidget(QWidget *parent)
    : QWidget{parent}
    , m_player(new AVPlayer)
    , m_openglWidget(new AVOpenGLWidget(this))
    , m_isPlaying(false)
    , m_isPaused(false)
{
    setupUI();
    connectSignals();

    // Default volume
    m_player->setVolume(50);
    m_volumeSlider->setValue(50);

    // Default test URL
    m_urlEdit->setText("rtmp://127.0.0.1:1935/stream/V1/stream");
}
+
+AVPlayerWidget::~AVPlayerWidget()
+{
+    if (m_player) {
+        m_player->clearPlayer();
+    }
+}
+
// Build the widget tree: the video surface on top, a single row of
// transport controls (URL box, buttons, volume, time display) underneath.
void AVPlayerWidget::setupUI()
{
    m_mainLayout = new QVBoxLayout(this);

    // OpenGL video render surface (stretch factor 1 so it takes the space)
    m_mainLayout->addWidget(m_openglWidget, 1);

    // Control panel
    m_controlLayout = new QHBoxLayout();

    // URL input
    m_urlEdit = new QLineEdit(this);
    m_urlEdit->setPlaceholderText("输入视频文件路径或URL");
    m_controlLayout->addWidget(m_urlEdit, 2);

    // Quick-test play button
    m_testPlayButton = new QPushButton("测试播放", this);
    m_controlLayout->addWidget(m_testPlayButton);

    // Transport buttons
    m_playButton = new QPushButton("播放", this);
    m_pauseButton = new QPushButton("暂停", this);
    m_stopButton = new QPushButton("停止", this);

    m_controlLayout->addWidget(m_playButton);
    m_controlLayout->addWidget(m_pauseButton);
    m_controlLayout->addWidget(m_stopButton);

    // Volume control
    QLabel *volumeLabel = new QLabel("音量:", this);
    m_volumeSlider = new QSlider(Qt::Horizontal, this);
    m_volumeSlider->setRange(0, 100);
    m_volumeSlider->setValue(50);
    m_volumeSlider->setMaximumWidth(100);

    m_controlLayout->addWidget(volumeLabel);
    m_controlLayout->addWidget(m_volumeSlider);

    // Time display, formatted "current / total"
    m_timeLabel = new QLabel("00:00 / 00:00", this);
    m_controlLayout->addWidget(m_timeLabel);

    m_mainLayout->addLayout(m_controlLayout);

    // Initial state: nothing to pause or stop yet
    m_pauseButton->setEnabled(false);
    m_stopButton->setEnabled(false);
}
+
// Wire AVPlayer callbacks and UI widgets to this widget's slots.
void AVPlayerWidget::connectSignals()
{
    // AVPlayer signals. Frame delivery and termination use queued
    // connections -- presumably because they are emitted off the UI thread;
    // confirm against AVPlayer's threading model.
    connect(m_player, &AVPlayer::frameChanged, this, &AVPlayerWidget::onFrameChanged, Qt::QueuedConnection);
    connect(m_player, &AVPlayer::AVDurationChanged, this, &AVPlayerWidget::durationChangedSlot);
    connect(m_player, &AVPlayer::AVPtsChanged, this, &AVPlayerWidget::ptsChangedSlot);
    connect(m_player, &AVPlayer::AVTerminate, this, &AVPlayerWidget::terminateSlot, Qt::QueuedConnection);

    // UI widget signals
    connect(m_playButton, &QPushButton::clicked, this, &AVPlayerWidget::onPlayButtonClicked);
    connect(m_pauseButton, &QPushButton::clicked, this, &AVPlayerWidget::onPauseButtonClicked);
    connect(m_stopButton, &QPushButton::clicked, this, &AVPlayerWidget::onStopButtonClicked);
    connect(m_testPlayButton, &QPushButton::clicked, this, &AVPlayerWidget::onTestPlayButtonClicked);
    connect(m_volumeSlider, &QSlider::valueChanged, this, &AVPlayerWidget::onVolumeChanged);
}
+
+void AVPlayerWidget::play(const QString &url)
+{
+    if (!m_player->play(url)) {
+        // 提示反馈?
+        return;
+    }
+    m_isPlaying = true;
+    m_isPaused = false;
+    m_playButton->setEnabled(false);
+    m_pauseButton->setEnabled(true);
+    m_stopButton->setEnabled(true);
+    emit playStateChanged(true);
+}
+
+void AVPlayerWidget::stop()
+{
+    m_player->clearPlayer();
+    m_isPlaying = false;
+    m_isPaused = false;
+    m_playButton->setEnabled(true);
+    m_pauseButton->setEnabled(false);
+    m_stopButton->setEnabled(false);
+    m_openglWidget->clearFrame();
+    emit playStateChanged(false);
+}
+
+void AVPlayerWidget::pause()
+{
+    if (m_isPlaying && !m_isPaused) {
+        m_player->pause(true);
+        m_isPaused = true;
+        m_playButton->setText("继续");
+        m_playButton->setEnabled(true);
+        m_pauseButton->setEnabled(false);
+    }
+}
+
+void AVPlayerWidget::resume()
+{
+    if (m_isPlaying && m_isPaused) {
+        m_player->pause(false);
+        m_isPaused = false;
+        m_playButton->setText("播放");
+        m_playButton->setEnabled(false);
+        m_pauseButton->setEnabled(true);
+    }
+}
+
+void AVPlayerWidget::onPlayButtonClicked()
+{
+    if (m_isPaused) {
+        resume();
+    } else {
+        QString url = m_urlEdit->text().trimmed();
+        if (!url.isEmpty()) {
+            play(url);
+        }
+    }
+}
+
// Pause button: simple forwarder to pause().
void AVPlayerWidget::onPauseButtonClicked()
{
    pause();
}
+
// Stop button: simple forwarder to stop().
void AVPlayerWidget::onStopButtonClicked()
{
    stop();
}
+
// Play a built-in test clip.
// NOTE(review): hard-coded local path -- only works on the developer's
// machine; consider a resource or a configurable default.
void AVPlayerWidget::onTestPlayButtonClicked()
{
    QString testUrl = "C:/Users/zhuizhu/Videos/2.mp4";
    m_urlEdit->setText(testUrl);
    play(testUrl);
}
+
// Forward slider changes (0-100) to the player's volume.
void AVPlayerWidget::onVolumeChanged(int volume)
{
    m_player->setVolume(volume);
}
+
// Hand each decoded frame to the OpenGL widget for display.
void AVPlayerWidget::onFrameChanged(QSharedPointer<VideoFrame> frame)
{
    m_openglWidget->onShowYUV(frame);

    // Note: do not free the underlying AVFrame here; the render path owns it.
}
+
+void AVPlayerWidget::ptsChangedSlot(unsigned int pts)
+{
+    // 更新时间显示
+    int seconds = pts / 1000;
+    int minutes = seconds / 60;
+    seconds = seconds % 60;
+    
+    QString timeStr = QString("%1:%2").arg(minutes, 2, 10, QChar('0')).arg(seconds, 2, 10, QChar('0'));
+    // 这里可以更新时间标签的当前时间部分
+}
+
+void AVPlayerWidget::durationChangedSlot(unsigned int duration)
+{
+    // 更新总时长显示
+    int seconds = duration / 1000;
+    int minutes = seconds / 60;
+    seconds = seconds % 60;
+    
+    QString durationStr = QString("%1:%2").arg(minutes, 2, 10, QChar('0')).arg(seconds, 2, 10, QChar('0'));
+    m_timeLabel->setText(QString("00:00 / %1").arg(durationStr));
+}
+
// Player signalled termination (end of stream or fatal error): reset the UI.
void AVPlayerWidget::terminateSlot()
{
    stop();
}

+ 69 - 0
libs/AVPlayer/avplayerwidget.h

@@ -0,0 +1,69 @@
+#ifndef AVPLAYERWIDGET_H
+#define AVPLAYERWIDGET_H
+
+#include <QWidget>
+#include <QPushButton>
+#include <QVBoxLayout>
+#include <QHBoxLayout>
+#include <QLabel>
+#include <QSlider>
+#include <QLineEdit>
+#include <QSharedPointer>
+
+class AVPlayer;
+class AVOpenGLWidget;
+class VideoFrame;
+
+class AVPlayerWidget : public QWidget
+{
+    Q_OBJECT
+public:
+    explicit AVPlayerWidget(QWidget *parent = nullptr);
+    ~AVPlayerWidget();
+
+    void play(const QString &url);
+    void stop();
+    void pause();
+    void resume();
+    
+private Q_SLOTS:
+    void ptsChangedSlot(unsigned int duration);
+    void durationChangedSlot(unsigned int pts);
+    void terminateSlot();
+    
+    // UI控制槽函数
+    void onPlayButtonClicked();
+    void onStopButtonClicked();
+    void onPauseButtonClicked();
+    void onTestPlayButtonClicked();
+    void onVolumeChanged(int volume);
+    
+    // 视频帧处理
+    void onFrameChanged(QSharedPointer<VideoFrame> frame);
+    
+signals:
+    void playStateChanged(bool isPlaying);
+
+private:
+    void setupUI();
+    void connectSignals();
+    
+    AVPlayer *m_player;
+    AVOpenGLWidget *m_openglWidget;
+    
+    // UI组件
+    QVBoxLayout *m_mainLayout;
+    QHBoxLayout *m_controlLayout;
+    QPushButton *m_playButton;
+    QPushButton *m_pauseButton;
+    QPushButton *m_stopButton;
+    QPushButton *m_testPlayButton;
+    QLabel *m_timeLabel;
+    QSlider *m_volumeSlider;
+    QLineEdit *m_urlEdit;
+    
+    bool m_isPlaying;
+    bool m_isPaused;
+};
+
+#endif // AVPLAYERWIDGET_H

+ 567 - 0
libs/AVPlayer/ffmpeg_compat.h

@@ -0,0 +1,567 @@
+#pragma once
+
+/**
+ * FFmpeg Version Compatibility Header
+ * This file provides compatibility macros and functions for different FFmpeg versions
+ * Supports FFmpeg 3.x, 4.x, 5.x, 6.x, and 7.x
+ */
+
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavcodec/bsf.h>
#include <libavdevice/avdevice.h>
#include <libavformat/avformat.h>
#include <libavutil/channel_layout.h>
#include <libavutil/frame.h>
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
#include <libavutil/pixfmt.h>
#include <libavutil/time.h>
#include <libswresample/swresample.h>
#include <libswscale/swscale.h>
}
+
// FFmpeg version detection: derive the major release series from
// libavformat's version number.
#if LIBAVFORMAT_VERSION_INT < AV_VERSION_INT(58, 9, 100)
    #define FFMPEG_VERSION_MAJOR 3
#elif LIBAVFORMAT_VERSION_INT < AV_VERSION_INT(59, 0, 100)
    #define FFMPEG_VERSION_MAJOR 4
#elif LIBAVFORMAT_VERSION_INT < AV_VERSION_INT(60, 0, 100)
    #define FFMPEG_VERSION_MAJOR 5
#elif LIBAVFORMAT_VERSION_INT < AV_VERSION_INT(61, 0, 100)
    #define FFMPEG_VERSION_MAJOR 6
#else
    #define FFMPEG_VERSION_MAJOR 7
#endif

// Compatibility functions for deprecated functions

// av_register_all() was removed after FFmpeg 3.x; calling this wrapper is a
// no-op on FFmpeg 4.0+.
inline void ffmpeg_register_all() {
#if FFMPEG_VERSION_MAJOR < 4
    av_register_all();
#endif
    // No-op in FFmpeg 4.0+
}
+
+inline void ffmpeg_register_devices() {
+#if FFMPEG_VERSION_MAJOR < 4
+    avdevice_register_all();
+#endif
+    // No-op in FFmpeg 4.0+
+}
+
// Compatibility for AVInputFormat/AVOutputFormat
// NOTE(review): these types became const-qualified in the FFmpeg 5 API;
// gating at >= 4 is broader than strictly necessary -- confirm against the
// headers actually in use.
#if FFMPEG_VERSION_MAJOR >= 4
    using FFmpegInputFormat = const AVInputFormat;
    using FFmpegOutputFormat = const AVOutputFormat;
#else
    using FFmpegInputFormat = AVInputFormat;
    using FFmpegOutputFormat = AVOutputFormat;
#endif

// Compatibility for AVCodec pointer
#if FFMPEG_VERSION_MAJOR >= 7
    using FFmpegCodec = const AVCodec;
#else
    using FFmpegCodec = AVCodec;
#endif

// Compatibility for av_err2str macro (C++ safe version)
// Re-defined on top of alloca so the error-string buffer is valid for the
// whole enclosing full expression when compiled as C++.
#ifdef __cplusplus
#undef av_err2str
#ifdef _MSC_VER
#include <malloc.h>
#define av_err2str(errnum) av_make_error_string((char*)_alloca(AV_ERROR_MAX_STRING_SIZE), AV_ERROR_MAX_STRING_SIZE, errnum)
#else
#define av_err2str(errnum) av_make_error_string((char*)__builtin_alloca(AV_ERROR_MAX_STRING_SIZE), AV_ERROR_MAX_STRING_SIZE, errnum)
#endif
#endif

// Compatibility for bitstream filter context
#if FFMPEG_VERSION_MAJOR >= 4
    using AVBitStreamFilterContext = AVBSFContext;
#else
    // For FFmpeg 3.x, AVBitStreamFilterContext is already defined
    // No need to redefine it
#endif
+
// Compatibility functions for codec context access.
// FFmpeg 4 moved per-stream codec parameters from AVStream::codec
// (deprecated) to AVStream::codecpar; these getters hide that difference.

// Media type (audio/video/...) of the stream.
inline AVMediaType ffmpeg_get_codec_type(AVStream* stream) {
#if FFMPEG_VERSION_MAJOR >= 4
    return stream->codecpar->codec_type;
#else
    return stream->codec->codec_type;
#endif
}

// Codec id of the stream.
inline AVCodecID ffmpeg_get_codec_id(AVStream* stream) {
#if FFMPEG_VERSION_MAJOR >= 4
    return stream->codecpar->codec_id;
#else
    return stream->codec->codec_id;
#endif
}

// Video width in pixels.
inline int ffmpeg_get_codec_width(AVStream* stream) {
#if FFMPEG_VERSION_MAJOR >= 4
    return stream->codecpar->width;
#else
    return stream->codec->width;
#endif
}

// Video height in pixels.
inline int ffmpeg_get_codec_height(AVStream* stream) {
#if FFMPEG_VERSION_MAJOR >= 4
    return stream->codecpar->height;
#else
    return stream->codec->height;
#endif
}

// Video pixel format (codecpar stores it as a plain int in `format`).
inline AVPixelFormat ffmpeg_get_codec_pix_fmt(AVStream* stream) {
#if FFMPEG_VERSION_MAJOR >= 4
    return static_cast<AVPixelFormat>(stream->codecpar->format);
#else
    return stream->codec->pix_fmt;
#endif
}

// Audio sample rate in Hz.
inline int ffmpeg_get_codec_sample_rate(AVStream* stream) {
#if FFMPEG_VERSION_MAJOR >= 4
    return stream->codecpar->sample_rate;
#else
    return stream->codec->sample_rate;
#endif
}

// Audio channel count (FFmpeg 7 path reads the new AVChannelLayout).
inline int ffmpeg_get_codec_channels(AVStream* stream) {
#if FFMPEG_VERSION_MAJOR >= 7
    return stream->codecpar->ch_layout.nb_channels;
#elif FFMPEG_VERSION_MAJOR >= 4
    return stream->codecpar->channels;
#else
    return stream->codec->channels;
#endif
}

// Audio channel layout as a legacy bitmask; non-native (non-mask) layouts
// fall back to stereo.
inline uint64_t ffmpeg_get_codec_channel_layout(AVStream* stream) {
#if FFMPEG_VERSION_MAJOR >= 7
    // In FFmpeg 7, use ch_layout.u.mask directly if it's a mask layout
    if (stream->codecpar->ch_layout.order == AV_CHANNEL_ORDER_NATIVE) {
        return stream->codecpar->ch_layout.u.mask;
    } else {
        // For non-mask layouts, return a default stereo layout
        return AV_CH_LAYOUT_STEREO;
    }
#elif FFMPEG_VERSION_MAJOR >= 4
    return stream->codecpar->channel_layout;
#else
    return stream->codec->channel_layout;
#endif
}

// Audio sample format (codecpar stores it as a plain int in `format`).
inline AVSampleFormat ffmpeg_get_codec_sample_fmt(AVStream* stream) {
#if FFMPEG_VERSION_MAJOR >= 4
    return static_cast<AVSampleFormat>(stream->codecpar->format);
#else
    return stream->codec->sample_fmt;
#endif
}
+
// Compatibility for channel layout functions

// Default channel-layout bitmask for a given channel count.
inline uint64_t ffmpeg_get_default_channel_layout(int channels) {
#if FFMPEG_VERSION_MAJOR >= 7
    AVChannelLayout ch_layout;
    av_channel_layout_default(&ch_layout, channels);
    if (ch_layout.order == AV_CHANNEL_ORDER_NATIVE) {
        return ch_layout.u.mask;
    } else {
        // Fallback for common channel counts
        switch (channels) {
            case 1: return AV_CH_LAYOUT_MONO;
            case 2: return AV_CH_LAYOUT_STEREO;
            case 6: return AV_CH_LAYOUT_5POINT1;
            case 8: return AV_CH_LAYOUT_7POINT1;
            default: return AV_CH_LAYOUT_STEREO;
        }
    }
#else
    return av_get_default_channel_layout(channels);
#endif
}

// Compatibility for filter registration
// NOTE(review): the FFmpeg-3 branch calls avfilter_register_all(), but this
// header does not include <libavfilter/avfilter.h> -- confirm it compiles
// against a 3.x tree.
inline void ffmpeg_register_filters() {
#if FFMPEG_VERSION_MAJOR < 4
    avfilter_register_all();
#endif
    // No-op in FFmpeg 4.0+
}

// Compatibility for channel layout string functions
// Writes a human-readable layout description into buf. On the FFmpeg 7 path
// the return value is av_channel_layout_describe's result (may be negative
// on error); otherwise it is the string length.
inline int ffmpeg_get_channel_layout_string(char *buf, int buf_size, int nb_channels, uint64_t channel_layout) {
#if FFMPEG_VERSION_MAJOR >= 7
    (void)nb_channels; // Suppress unused parameter warning
    AVChannelLayout ch_layout;
    av_channel_layout_from_mask(&ch_layout, channel_layout);
    return av_channel_layout_describe(&ch_layout, buf, buf_size);
#else
    av_get_channel_layout_string(buf, buf_size, nb_channels, channel_layout);
    return strlen(buf);
#endif
}

// Compatibility for AVFrame channels access
inline int ffmpeg_get_frame_channels(AVFrame* frame) {
#if FFMPEG_VERSION_MAJOR >= 7
    return frame->ch_layout.nb_channels;
#else
    return frame->channels;
#endif
}

// Compatibility for AVFrame channel_layout access
// Non-native (non-mask) layouts fall back to stereo.
inline uint64_t ffmpeg_get_frame_channel_layout(AVFrame* frame) {
#if FFMPEG_VERSION_MAJOR >= 7
    if (frame->ch_layout.order == AV_CHANNEL_ORDER_NATIVE) {
        return frame->ch_layout.u.mask;
    } else {
        return AV_CH_LAYOUT_STEREO;
    }
#else
    return frame->channel_layout;
#endif
}
+
+// Compatibility for av_samples_get_buffer_size
+inline int ffmpeg_get_buffer_size(enum AVSampleFormat sample_fmt, int nb_channels, int nb_samples, int align) {
+#if FFMPEG_VERSION_MAJOR >= 7
+    return av_samples_get_buffer_size(nullptr, nb_channels, nb_samples, sample_fmt, align);
+#else
+    return av_samples_get_buffer_size(nullptr, nb_channels, nb_samples, sample_fmt, align);
+#endif
+}
+
// Compatibility for codec context creation from stream
// OWNERSHIP NOTE: on FFmpeg 4+ this allocates a NEW context that the caller
// must release with avcodec_free_context(); on FFmpeg 3.x it returns the
// stream's own context, which the caller must NOT free.
inline AVCodecContext* ffmpeg_get_codec_context(AVStream* stream) {
#if FFMPEG_VERSION_MAJOR >= 4
    const AVCodec* codec = avcodec_find_decoder(stream->codecpar->codec_id);
    if (!codec) return nullptr;
    
    AVCodecContext* ctx = avcodec_alloc_context3(codec);
    if (!ctx) return nullptr;
    
    if (avcodec_parameters_to_context(ctx, stream->codecpar) < 0) {
        avcodec_free_context(&ctx);
        return nullptr;
    }
    
    return ctx;
#else
    return stream->codec;
#endif
}

// Compatibility for setting stream codec parameters

// Set the stream's codec id.
inline void ffmpeg_set_stream_codec_id(AVStream* stream, enum AVCodecID codec_id) {
#if FFMPEG_VERSION_MAJOR >= 4
    stream->codecpar->codec_id = codec_id;
#else
    stream->codec->codec_id = codec_id;
#endif
}

// Set the stream's media type.
inline void ffmpeg_set_stream_codec_type(AVStream* stream, enum AVMediaType codec_type) {
#if FFMPEG_VERSION_MAJOR >= 4
    stream->codecpar->codec_type = codec_type;
#else
    stream->codec->codec_type = codec_type;
#endif
}

// Set the stream's bit rate.
inline void ffmpeg_set_stream_bit_rate(AVStream* stream, int64_t bit_rate) {
#if FFMPEG_VERSION_MAJOR >= 4
    stream->codecpar->bit_rate = bit_rate;
#else
    stream->codec->bit_rate = bit_rate;
#endif
}

// Set the stream's pixel format.
inline void ffmpeg_set_stream_pix_fmt(AVStream* stream, enum AVPixelFormat pix_fmt) {
#if FFMPEG_VERSION_MAJOR >= 4
    stream->codecpar->format = pix_fmt;
#else
    stream->codec->pix_fmt = pix_fmt;
#endif
}

// Set the stream's video dimensions.
inline void ffmpeg_set_stream_dimensions(AVStream* stream, int width, int height) {
#if FFMPEG_VERSION_MAJOR >= 4
    stream->codecpar->width = width;
    stream->codecpar->height = height;
#else
    stream->codec->width = width;
    stream->codec->height = height;
#endif
}
+
// Compatibility for stream codec parameters
// Copy an opened codec context's parameters into the stream (no-op on 3.x,
// where the stream shares the context directly).
inline int ffmpeg_copy_codec_params_to_stream(AVStream* stream, AVCodecContext* codec_ctx) {
#if FFMPEG_VERSION_MAJOR >= 4
    return avcodec_parameters_from_context(stream->codecpar, codec_ctx);
#else
    return 0;  // No-op for older versions
#endif
}

// Compatibility for extradata access
inline uint8_t* ffmpeg_get_extradata(AVStream* stream) {
#if FFMPEG_VERSION_MAJOR >= 4
    return stream->codecpar->extradata;
#else
    return stream->codec->extradata;
#endif
}

inline int ffmpeg_get_extradata_size(AVStream* stream) {
#if FFMPEG_VERSION_MAJOR >= 4
    return stream->codecpar->extradata_size;
#else
    return stream->codec->extradata_size;
#endif
}

// Attach an extradata buffer to the stream.
// NOTE(review): this stores the caller's pointer directly; FFmpeg expects
// extradata to be av_malloc'd with AV_INPUT_BUFFER_PADDING_SIZE padding --
// confirm callers allocate accordingly.
inline void ffmpeg_set_stream_extradata(AVStream* stream, uint8_t* data, int size) {
#if FFMPEG_VERSION_MAJOR >= 4
    stream->codecpar->extradata = data;
    stream->codecpar->extradata_size = size;
#else
    stream->codec->extradata = data;
    stream->codec->extradata_size = size;
#endif
}

// Compatibility for AVCodecContext channels and channel_layout

// Give the context a default layout for `channels` channels.
inline void ffmpeg_set_codec_channels(AVCodecContext* ctx, int channels) {
#if FFMPEG_VERSION_MAJOR >= 7
    av_channel_layout_default(&ctx->ch_layout, channels);
#else
    ctx->channels = channels;
#endif
}

// Apply a legacy bitmask layout to the context.
inline void ffmpeg_set_codec_channel_layout(AVCodecContext* ctx, uint64_t channel_layout) {
#if FFMPEG_VERSION_MAJOR >= 7
    av_channel_layout_from_mask(&ctx->ch_layout, channel_layout);
#else
    ctx->channel_layout = channel_layout;
#endif
}

// Channel count of an AVCodecContext.
inline int ffmpeg_get_codec_context_channels(AVCodecContext* ctx) {
#if FFMPEG_VERSION_MAJOR >= 7
    return ctx->ch_layout.nb_channels;
#else
    return ctx->channels;
#endif
}
+
+// Compatibility for swr_alloc_set_opts
+inline SwrContext* ffmpeg_swr_alloc_set_opts(SwrContext *s,
+                                           int64_t out_ch_layout, enum AVSampleFormat out_sample_fmt, int out_sample_rate,
+                                           int64_t  in_ch_layout, enum AVSampleFormat  in_sample_fmt, int  in_sample_rate,
+                                           int log_offset, void *log_ctx) {
+#if FFMPEG_VERSION_MAJOR >= 7
+    SwrContext *swr_ctx = swr_alloc();
+    if (!swr_ctx) return NULL;
+    
+    AVChannelLayout out_ch_layout_new, in_ch_layout_new;
+    av_channel_layout_from_mask(&out_ch_layout_new, out_ch_layout);
+    av_channel_layout_from_mask(&in_ch_layout_new, in_ch_layout);
+    
+    av_opt_set_chlayout(swr_ctx, "ochl", &out_ch_layout_new, 0);
+    av_opt_set_int(swr_ctx, "osf", out_sample_fmt, 0);
+    av_opt_set_int(swr_ctx, "osr", out_sample_rate, 0);
+    av_opt_set_chlayout(swr_ctx, "ichl", &in_ch_layout_new, 0);
+    av_opt_set_int(swr_ctx, "isf", in_sample_fmt, 0);
+    av_opt_set_int(swr_ctx, "isr", in_sample_rate, 0);
+    
+    av_channel_layout_uninit(&out_ch_layout_new);
+    av_channel_layout_uninit(&in_ch_layout_new);
+    
+    return swr_ctx;
+#else
+    return swr_alloc_set_opts(s, out_ch_layout, out_sample_fmt, out_sample_rate,
+                             in_ch_layout, in_sample_fmt, in_sample_rate,
+                             log_offset, log_ctx);
+#endif
+}
+
+// Compatibility for codec parameters setting
+inline int ffmpeg_av_samples_alloc(uint8_t **audio_data, int *linesize, int nb_channels,
+                                   int nb_samples, enum AVSampleFormat sample_fmt, int align) {
+#if FFMPEG_VERSION_MAJOR >= 7
+    return av_samples_alloc_array_and_samples(&audio_data, linesize, nb_channels, nb_samples, sample_fmt, align);
+#else
+    return av_samples_alloc(audio_data, linesize, nb_channels, nb_samples, sample_fmt, align);
+#endif
+}
+
// Copy a codec context's channel layout onto a frame.
inline void ffmpeg_set_frame_channel_layout(AVFrame* frame, AVCodecContext* codec_ctx) {
#if FFMPEG_VERSION_MAJOR >= 7
    av_channel_layout_copy(&frame->ch_layout, &codec_ctx->ch_layout);
#else
    frame->channel_layout = codec_ctx->channel_layout;
#endif
}

// Copy the interesting codec-context fields onto a stream's parameters.
// On FFmpeg 3.x this is a shallow struct copy of the whole codec context.
inline void ffmpeg_set_stream_codec_params(AVStream* stream, AVCodecContext* codec_ctx) {
#if FFMPEG_VERSION_MAJOR >= 7
    stream->codecpar->codec_id = codec_ctx->codec_id;
    stream->codecpar->bit_rate = codec_ctx->bit_rate;
    stream->codecpar->width = codec_ctx->width;
    stream->codecpar->height = codec_ctx->height;
    stream->codecpar->format = codec_ctx->pix_fmt;
    stream->codecpar->sample_rate = codec_ctx->sample_rate;
    av_channel_layout_copy(&stream->codecpar->ch_layout, &codec_ctx->ch_layout);
    stream->time_base = codec_ctx->time_base;
#elif FFMPEG_VERSION_MAJOR >= 4
    stream->codecpar->codec_id = codec_ctx->codec_id;
    stream->codecpar->bit_rate = codec_ctx->bit_rate;
    stream->codecpar->width = codec_ctx->width;
    stream->codecpar->height = codec_ctx->height;
    stream->codecpar->format = codec_ctx->pix_fmt;
    stream->codecpar->sample_rate = codec_ctx->sample_rate;
    stream->codecpar->channels = codec_ctx->channels;
    stream->codecpar->channel_layout = codec_ctx->channel_layout;
    stream->time_base = codec_ctx->time_base;
#else
    *(stream->codec) = *codec_ctx;
#endif
}

// Additional compatibility functions for missing identifiers.
// NOTE(review): ffmpeg_get_channels/ffmpeg_set_channels/
// ffmpeg_set_channel_layout duplicate the ffmpeg_*_codec_* helpers above;
// kept for source compatibility with existing callers.
inline int ffmpeg_get_channels(AVCodecContext* ctx) {
#if FFMPEG_VERSION_MAJOR >= 7
    return ctx->ch_layout.nb_channels;
#else
    return ctx->channels;
#endif
}

inline void ffmpeg_set_channels(AVCodecContext* ctx, int channels) {
#if FFMPEG_VERSION_MAJOR >= 7
    av_channel_layout_default(&ctx->ch_layout, channels);
#else
    ctx->channels = channels;
#endif
}

inline void ffmpeg_set_channel_layout(AVCodecContext* ctx, uint64_t channel_layout) {
#if FFMPEG_VERSION_MAJOR >= 7
    av_channel_layout_from_mask(&ctx->ch_layout, channel_layout);
#else
    ctx->channel_layout = channel_layout;
#endif
}

// Give a frame a default layout for `channels` channels.
inline void ffmpeg_set_frame_channels(AVFrame* frame, int channels) {
#if FFMPEG_VERSION_MAJOR >= 7
    av_channel_layout_default(&frame->ch_layout, channels);
#else
    frame->channels = channels;
#endif
}

// Overload: apply a legacy bitmask layout to a frame.
inline void ffmpeg_set_frame_channel_layout(AVFrame* frame, uint64_t channel_layout) {
#if FFMPEG_VERSION_MAJOR >= 7
    av_channel_layout_from_mask(&frame->ch_layout, channel_layout);
#else
    frame->channel_layout = channel_layout;
#endif
}

// pkt_pts was folded into AVFrame::pts in FFmpeg 4.
inline void ffmpeg_set_frame_pkt_pts(AVFrame* frame, int64_t pts) {
#if FFMPEG_VERSION_MAJOR >= 4
    frame->pts = pts;
#else
    frame->pkt_pts = pts;
#endif
}
+
// Compatibility for bitstream filter initialization
// On FFmpeg 4+ this allocates (but does not yet av_bsf_init) a BSF context;
// initialisation is deferred to ffmpeg_bitstream_filter_filter below.
#if FFMPEG_VERSION_MAJOR >= 4
inline AVBitStreamFilterContext* ffmpeg_bitstream_filter_init(const char* name)
{
    const AVBitStreamFilter* bsf = av_bsf_get_by_name(name);
    if (!bsf) return nullptr;
    
    AVBSFContext* ctx = nullptr;
    if (av_bsf_alloc(bsf, &ctx) < 0) return nullptr;
    
    return ctx;
}
#else
inline AVBitStreamFilterContext* ffmpeg_bitstream_filter_init(const char* name) {
    // For FFmpeg 3.x, use the old API
    return av_bitstream_filter_init(name);
}
#endif

// Release a bitstream filter context created by the init wrapper above.
#if FFMPEG_VERSION_MAJOR >= 4
inline void ffmpeg_bitstream_filter_close(AVBitStreamFilterContext* ctx) {
    if (ctx) {
        av_bsf_free(&ctx);
    }
}
#else
inline void ffmpeg_bitstream_filter_close(AVBitStreamFilterContext* ctx) {
    if (ctx) {
        av_bitstream_filter_close(ctx);
    }
}
#endif

// Run the filter over `packet` in place. Returns < 0 on error.
#if FFMPEG_VERSION_MAJOR >= 4
inline int ffmpeg_bitstream_filter_filter(AVBitStreamFilterContext* ctx, AVPacket* packet) {
    if (!ctx || !packet) return -1;
    
    // On newer FFmpeg the BSF context must be initialised before first use.
    if (ctx->par_in && ctx->par_in->codec_type == AVMEDIA_TYPE_UNKNOWN) {
        // Infer input parameters for the packet stream.
        // NOTE(review): hard-codes AAC audio when parameters are unknown --
        // confirm every caller really feeds AAC through this path.
        ctx->par_in->codec_type = AVMEDIA_TYPE_AUDIO;
        ctx->par_in->codec_id = AV_CODEC_ID_AAC;
        
        if (avcodec_parameters_copy(ctx->par_out, ctx->par_in) < 0) {
            return -1;
        }
        
        if (av_bsf_init(ctx) < 0) {
            return -1;
        }
    }
    
    int ret = av_bsf_send_packet(ctx, packet);
    if (ret < 0) return ret;
    
    ret = av_bsf_receive_packet(ctx, packet);
    return ret;
}
#else
inline int ffmpeg_bitstream_filter_filter(AVBitStreamFilterContext* ctx, AVPacket* packet) {
    if (!ctx || !packet) return -1;
    
    uint8_t* output_data = nullptr;
    int output_size = 0;
    
    int ret = av_bitstream_filter_filter(ctx, nullptr, nullptr, &output_data, &output_size,
                                         packet->data, packet->size, packet->flags & AV_PKT_FLAG_KEY);
    
    // On success, swap the filtered buffer into the packet (ref-counted so
    // av_packet_unref can free it later).
    if (ret >= 0 && output_data && output_size > 0) {
        av_packet_unref(packet);
        packet->data = output_data;
        packet->size = output_size;
        packet->buf = av_buffer_create(output_data, output_size, av_buffer_default_free, nullptr, 0);
    }
    
    return ret;
}
#endif

+ 104 - 0
libs/AVPlayer/test_avplayer.cpp

@@ -0,0 +1,104 @@
+#include <QApplication>
+#include <QMainWindow>
+#include <QVBoxLayout>
+#include <QWidget>
+#include <QFileDialog>
+#include <QMenuBar>
+#include <QAction>
+#include <QMessageBox>
+#include "avplayerwidget.h"
+
// Minimal manual-test harness window: hosts an AVPlayerWidget and adds
// File/Help menus for opening media files.
class TestMainWindow : public QMainWindow
{
    Q_OBJECT

public:
    TestMainWindow(QWidget *parent = nullptr)
        : QMainWindow(parent)
    {
        setupUI();
        setupMenus();
    }

private slots:
    // Prompt for a media file and start playback.
    void openFile()
    {
        QString fileName = QFileDialog::getOpenFileName(this,
            tr("打开视频文件"), "",
            tr("视频文件 (*.mp4 *.avi *.mkv *.mov *.wmv *.flv *.webm);;所有文件 (*)"));
        
        if (!fileName.isEmpty()) {
            m_playerWidget->play(fileName);
        }
    }
    
    // Show the About box.
    void showAbout()
    {
        QMessageBox::about(this, tr("关于"), 
            tr("AVPlayer 测试程序\n\n"
               "这是一个基于Qt和FFmpeg的视频播放器测试程序。\n"
               "支持多种视频格式,使用OpenGL进行硬件加速渲染。"));
    }

private:
    void setupUI()
    {
        m_playerWidget = new AVPlayerWidget(this);
        setCentralWidget(m_playerWidget);
        
        setWindowTitle(tr("AVPlayer 测试程序"));
        resize(800, 600);
        
        // Reflect the play state in the window title
        connect(m_playerWidget, &AVPlayerWidget::playStateChanged,
                this, [this](bool isPlaying) {
                    QString title = isPlaying ? 
                        tr("AVPlayer 测试程序 - 播放中") : 
                        tr("AVPlayer 测试程序");
                    setWindowTitle(title);
                });
    }
    
    void setupMenus()
    {
        // File menu
        QMenu *fileMenu = menuBar()->addMenu(tr("文件(&F)"));
        
        QAction *openAction = new QAction(tr("打开文件(&O)"), this);
        openAction->setShortcut(QKeySequence::Open);
        connect(openAction, &QAction::triggered, this, &TestMainWindow::openFile);
        fileMenu->addAction(openAction);
        
        fileMenu->addSeparator();
        
        QAction *exitAction = new QAction(tr("退出(&X)"), this);
        exitAction->setShortcut(QKeySequence::Quit);
        connect(exitAction, &QAction::triggered, this, &QWidget::close);
        fileMenu->addAction(exitAction);
        
        // Help menu
        QMenu *helpMenu = menuBar()->addMenu(tr("帮助(&H)"));
        
        QAction *aboutAction = new QAction(tr("关于(&A)"), this);
        connect(aboutAction, &QAction::triggered, this, &TestMainWindow::showAbout);
        helpMenu->addAction(aboutAction);
    }
    
    AVPlayerWidget *m_playerWidget;   // owned via Qt parent-child
};
+
+int main(int argc, char *argv[])
+{
+    QApplication app(argc, argv);
+    
+    app.setApplicationName("AVPlayer Test");
+    app.setApplicationVersion("1.0");
+    app.setOrganizationName("Test Organization");
+    
+    TestMainWindow window;
+    window.show();
+    
+    return app.exec();
+}
+
+#include "test_avplayer.moc"

+ 38 - 0
libs/AVPlayer/test_avplayer.pro

@@ -0,0 +1,38 @@
QT += core widgets opengl

# NOTE(review): the main project (LearningSmartClient.pro) builds with
# CONFIG += c++17; c++11 here is inconsistent -- confirm which standard the
# shared AVPlayer sources require.
CONFIG += c++11

TARGET = test_avplayer
TEMPLATE = app

# Pull in the AVPlayer library sources
include(AvPlayer.pri)

# FFmpeg library paths - adjust to the actual install location
win32 {
    # FFmpeg include path
    INCLUDEPATH += C:/ffmpeg/include
    
    # FFmpeg library path
    LIBS += -LC:/ffmpeg/lib \
            -lavcodec \
            -lavformat \
            -lavutil \
            -lswscale \
            -lswresample
}

# Source files
SOURCES += test_avplayer.cpp

# Optional resources
# RESOURCES += resources.qrc

# Output directory
DESTDIR = $$PWD/bin

# Intermediate build directories
OBJECTS_DIR = $$PWD/build/obj
MOC_DIR = $$PWD/build/moc
RCC_DIR = $$PWD/build/rcc
UI_DIR = $$PWD/build/ui
+ 103 - 0
libs/AVPlayer/threadpool.cpp

@@ -0,0 +1,103 @@
+#include "threadpool.h"
+#include <QDebug>
+
+int ThreadPool::m_maxThreads;
+
+int ThreadPool::m_freeThreads;
+
+int ThreadPool::m_initFlag = -1;
+
+int ThreadPool::m_pushIndex = 0;
+
+int ThreadPool::m_readIndex = 0;
+
+int ThreadPool::m_size = 0;
+
+std::vector<ThreadPool::Threads> ThreadPool::m_threadsQueue;
+
+int ThreadPool::m_maxTasks;
+
+std::vector<ThreadPool::Task> ThreadPool::m_tasksQueue;
+
+std::mutex ThreadPool::m_mutex;
+
+std::condition_variable ThreadPool::m_cond;
+
+bool ThreadPool::init(int threadsNum, int tasksNum)
+{
+    if (m_initFlag != -1)
+        return true;
+    if (threadsNum <= 0 || tasksNum <= 0)
+        return false;
+    m_maxThreads = threadsNum;
+    m_maxTasks = tasksNum;
+    m_freeThreads = m_maxThreads;
+    m_threadsQueue.resize(m_maxThreads);
+    m_tasksQueue.resize(m_maxTasks);
+    for (int i = 0; i < m_maxThreads; i++) {
+        m_threadsQueue[i].isWorking = false;
+        m_threadsQueue[i].isTerminate = false;
+        std::thread* _thread = new std::thread(threadEventLoop, &m_threadsQueue[i]);
+        if (!_thread) {
+            return false;
+        }
+        m_threadsQueue[i].id = _thread->get_id();
+        _thread->detach();
+    }
+    m_initFlag = 1;
+    return true;
+}
+
+bool ThreadPool::addTask(std::function<void(std::shared_ptr<void>)> func, std::shared_ptr<void> par)
+{
+    std::unique_lock<std::mutex> lock(m_mutex);
+    if (m_size >= m_maxTasks)
+        return false;
+    m_tasksQueue.at(m_pushIndex).func = func;
+    m_tasksQueue.at(m_pushIndex).par = par;
+    m_size++;
+    m_pushIndex = (m_pushIndex + 1) % m_maxTasks;
+    //qDebug()<<"free threads:"<<m_freeThreads;
+    m_cond.notify_one();
+    return true;
+}
+
+void ThreadPool::threadEventLoop(void* arg)
+{
+    auto theThread = reinterpret_cast<Threads*>(arg);
+    while (true) {
+        std::unique_lock<std::mutex> lock(m_mutex);
+        while (!m_size) {
+            if (theThread->isTerminate)
+                break;
+            m_cond.wait(lock);
+        }
+        if (theThread->isTerminate)
+            break;
+        Task task = m_tasksQueue[m_readIndex];
+        m_tasksQueue[m_readIndex].func = nullptr;
+        m_tasksQueue[m_readIndex].par.reset();
+        m_readIndex = (m_readIndex + 1) % m_maxTasks;
+        m_size--;
+        m_freeThreads--;
+        lock.unlock();
+        theThread->isWorking = true;
+        //执行任务函数
+        task.func(task.par);
+        theThread->isWorking = false;
+        lock.lock();
+        m_freeThreads++;
+    }
+}
+
+void ThreadPool::releasePool()
+{
+    std::unique_lock<std::mutex> lock(m_mutex);
+    for (int i = 0; i < m_maxThreads; i++) {
+        m_threadsQueue[i].isTerminate = true;
+    }
+    m_cond.notify_all();
+    lock.unlock();
+    //等待线程全部退出
+    std::this_thread::sleep_for(std::chrono::milliseconds(500));
+}

+ 61 - 0
libs/AVPlayer/threadpool.h

@@ -0,0 +1,61 @@
+#ifndef THREAD_POOL_H
+#define THREAD_POOL_H
+
+#include <condition_variable>
+#include <functional>
+#include <memory>
+#include <mutex>
+#include <thread>
+#include <vector>   // was missing: std::vector is used below
+
+// Static (process-wide) fixed-size thread pool with a ring-buffer task
+// queue.  Call init() once before addTask(); call releasePool() on shutdown.
+class ThreadPool
+{
+public:
+    // Create `threadsNum` detached worker threads and a task queue of
+    // `tasksNum` slots.  Returns false on invalid sizes.
+    static bool init(int threadsNum = 6, int tasksNum = 10);
+
+    // Enqueue a task; returns false when the queue is full.
+    static bool addTask(std::function<void(std::shared_ptr<void>)> func, std::shared_ptr<void> par);
+
+    // Ask all workers to terminate and wait (best effort) for them to exit.
+    static void releasePool();
+
+private:
+    typedef struct Task
+    {
+        // Task function.
+        std::function<void(std::shared_ptr<void>)> func;
+        // Opaque argument forwarded to the task function.
+        std::shared_ptr<void> par;
+    } Task;
+
+    typedef struct Threads
+    {
+        std::thread::id id;
+        bool isTerminate;  // set by releasePool() to stop the worker
+        bool isWorking;    // true while the worker is executing a task
+    } Threads;
+
+    // Worker loop: waits for tasks and runs them until isTerminate is set.
+    static void threadEventLoop(void* arg);
+
+    static int m_maxThreads;
+
+    static int m_freeThreads;
+
+    static int m_maxTasks;
+
+    static int m_pushIndex;
+
+    static int m_readIndex;
+
+    static int m_size;
+
+    // Initialisation flag; guarantees only the first init() call takes effect.
+    static int m_initFlag;
+
+    static std::vector<Threads> m_threadsQueue;
+
+    static std::mutex m_mutex;
+
+    static std::condition_variable m_cond;
+
+    static std::vector<Task> m_tasksQueue;
+};
+
+#endif

+ 122 - 0
libs/AVPlayer/vframe.h

@@ -0,0 +1,122 @@
+#ifndef VFRAME_H
+#define VFRAME_H
+#include <QObject>
+#include <stdlib.h>
+#include <string.h>
+
+#include "ffmpeg_compat.h"
+
+// Owns one video frame in a single contiguous malloc'd buffer using the
+// FFmpeg align=1 planar layout; m_data[]/m_linesize[] point into m_buffer.
+class VideoFrame
+{
+public:
+    // Build from per-plane pointers + strides; the planes are copied into
+    // an internally owned contiguous buffer.
+    VideoFrame(AVPixelFormat fmt,
+               uint32_t pixelW,
+               uint32_t pixelH,
+               uint8_t* const data[4],
+               const int linesize[4])
+        : m_buffer(nullptr)
+        , m_bufSize(0)
+        , m_format(fmt)
+        , m_pixelW(pixelW)
+        , m_pixelH(pixelH)
+    {
+        allocateAndCopy(data, linesize);
+    }
+
+    // Build from a contiguous base pointer assumed to already use the
+    // FFmpeg align=1 planar layout (fast path: one memcpy).
+    VideoFrame(AVPixelFormat fmt, uint32_t pixelW, uint32_t pixelH, const uint8_t* base)
+        : m_buffer(nullptr)
+        , m_bufSize(0)
+        , m_format(fmt)
+        , m_pixelW(pixelW)
+        , m_pixelH(pixelH)
+    {
+        if (!base) return;  // was missing: guard against a null source buffer
+        int needed = av_image_get_buffer_size(fmt, pixelW, pixelH, 1);
+        if (needed <= 0) return;
+        m_buffer = (uint8_t*)malloc(needed);
+        if (!m_buffer) return;
+        m_bufSize = needed;
+        memcpy(m_buffer, base, needed);
+        // Fill the per-plane pointers and strides into the new buffer.
+        av_image_fill_arrays(m_data, m_linesize, m_buffer, fmt, pixelW, pixelH, 1);
+    }
+
+    // The frame owns m_buffer through raw malloc/free, so a shallow copy
+    // would double-free it.  Forbid copying (Rule of Three) — pass frames
+    // around via (shared) pointers instead.
+    VideoFrame(const VideoFrame&) = delete;
+    VideoFrame& operator=(const VideoFrame&) = delete;
+
+    ~VideoFrame()
+    {
+        if (m_buffer)
+            free(m_buffer);
+        m_buffer = nullptr;
+        m_bufSize = 0;
+    }
+
+    // Plane accessors (YUV naming); pointers are valid only while this
+    // frame is alive, and null when allocation failed.
+    inline uint8_t* getBufY() const { return m_data[0]; }
+    inline uint8_t* getBufU() const { return m_data[1]; }
+    inline uint8_t* getBufV() const { return m_data[2]; }
+
+    inline uint8_t* getData(int i) const { return (i >=0 && i < 4) ? m_data[i] : nullptr; }
+    inline int getLineSize(int i) const { return (i >=0 && i < 4) ? m_linesize[i] : 0; }
+
+    inline uint32_t getPixelW() const { return m_pixelW; }
+    inline uint32_t getPixelH() const { return m_pixelH; }
+
+    inline AVPixelFormat getFormat() const { return m_format; }
+
+    // Fill the frame with pure black (Y=16, U=V=128 for the YUV formats
+    // handled below; any other format is simply zeroed, effect not
+    // guaranteed).
+    // NOTE(review): plane sizes assume linesize == plane width (true for
+    // the align=1 layout) and even pixelW/pixelH for subsampled formats.
+    void setToPureBlack()
+    {
+        if (!m_buffer) return;
+        if (m_format == AV_PIX_FMT_YUV420P || m_format == AV_PIX_FMT_YUVJ420P) {
+            int sizeY = m_pixelW * m_pixelH;
+            memset(m_data[0], 16, sizeY);
+            memset(m_data[1], 128, sizeY >> 2);
+            memset(m_data[2], 128, sizeY >> 2);
+        } else if (m_format == AV_PIX_FMT_YUV422P) {
+            int sizeY = m_pixelW * m_pixelH;
+            memset(m_data[0], 16, sizeY);
+            memset(m_data[1], 128, sizeY >> 1);
+            memset(m_data[2], 128, sizeY >> 1);
+        } else if (m_format == AV_PIX_FMT_YUV444P) {
+            int sizeY = m_pixelW * m_pixelH;
+            memset(m_data[0], 16, sizeY);
+            memset(m_data[1], 128, sizeY);
+            memset(m_data[2], 128, sizeY);
+        } else {
+            // Other formats: just clear the whole buffer.
+            memset(m_buffer, 0, m_bufSize);
+        }
+    }
+
+private:
+    // Copy the given (possibly padded) planes into a freshly allocated
+    // contiguous buffer and refresh m_data/m_linesize to point into it.
+    void allocateAndCopy(uint8_t* const data[4], const int linesize[4])
+    {
+        int needed = av_image_get_buffer_size(m_format, m_pixelW, m_pixelH, 1);
+        if (needed <= 0) return;
+        m_buffer = (uint8_t*)malloc(needed);
+        if (!m_buffer) return;
+        m_bufSize = needed;
+        // Gather the source planes into the contiguous buffer.
+        av_image_copy_to_buffer(m_buffer,
+                                needed,
+                                (const uint8_t* const*)data,
+                                linesize,
+                                m_format,
+                                m_pixelW,
+                                m_pixelH,
+                                1);
+        // Fill the per-plane pointers and strides into the new buffer.
+        av_image_fill_arrays(m_data, m_linesize, m_buffer, m_format, m_pixelW, m_pixelH, 1);
+    }
+
+    uint8_t* m_buffer;   // owning pointer to the contiguous frame data
+    int m_bufSize;       // size of m_buffer in bytes
+
+    AVPixelFormat m_format;
+    uint32_t m_pixelW;
+    uint32_t m_pixelH;
+
+    uint8_t* m_data[4] = {nullptr, nullptr, nullptr, nullptr};
+    int m_linesize[4] = {0, 0, 0, 0};
+};
+
+#endif // VFRAME_H

+ 1 - 0
libs/Recorder/Recorder.pri

@@ -18,6 +18,7 @@ HEADERS += \
     $$PWD/encoder_video_x264.h \
     $$PWD/error_define.h \
     $$PWD/export.h \
+    $$PWD/ffmpeg_compat.h \
     $$PWD/filter.h \
     $$PWD/filter_amix.h \
     $$PWD/filter_aresample.h \

+ 192 - 0
libs/Recorder/WGC/WGC.vcxproj

@@ -0,0 +1,192 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <ItemGroup Label="ProjectConfigurations">
+    <ProjectConfiguration Include="Debug_DLL|Win32">
+      <Configuration>Debug_DLL</Configuration>
+      <Platform>Win32</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Debug|Win32">
+      <Configuration>Debug</Configuration>
+      <Platform>Win32</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Release_DLL|Win32">
+      <Configuration>Release_DLL</Configuration>
+      <Platform>Win32</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Release|Win32">
+      <Configuration>Release</Configuration>
+      <Platform>Win32</Platform>
+    </ProjectConfiguration>
+  </ItemGroup>
+  <PropertyGroup Label="Globals">
+    <VCProjectVersion>16.0</VCProjectVersion>
+    <Keyword>Win32Proj</Keyword>
+    <ProjectGuid>{ed5a2ff2-13f2-4a51-a347-6251f0024ca0}</ProjectGuid>
+    <RootNamespace>WGC</RootNamespace>
+    <WindowsTargetPlatformVersion>10.0</WindowsTargetPlatformVersion>
+  </PropertyGroup>
+  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
+    <ConfigurationType>DynamicLibrary</ConfigurationType>
+    <UseDebugLibraries>true</UseDebugLibraries>
+    <PlatformToolset>v143</PlatformToolset>
+    <CharacterSet>Unicode</CharacterSet>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug_DLL|Win32'" Label="Configuration">
+    <ConfigurationType>DynamicLibrary</ConfigurationType>
+    <UseDebugLibraries>true</UseDebugLibraries>
+    <PlatformToolset>v143</PlatformToolset>
+    <CharacterSet>Unicode</CharacterSet>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
+    <ConfigurationType>DynamicLibrary</ConfigurationType>
+    <UseDebugLibraries>false</UseDebugLibraries>
+    <PlatformToolset>v142</PlatformToolset>
+    <WholeProgramOptimization>true</WholeProgramOptimization>
+    <CharacterSet>Unicode</CharacterSet>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release_DLL|Win32'" Label="Configuration">
+    <ConfigurationType>DynamicLibrary</ConfigurationType>
+    <UseDebugLibraries>false</UseDebugLibraries>
+    <PlatformToolset>v143</PlatformToolset>
+    <WholeProgramOptimization>true</WholeProgramOptimization>
+    <CharacterSet>Unicode</CharacterSet>
+  </PropertyGroup>
+  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+  <ImportGroup Label="ExtensionSettings">
+  </ImportGroup>
+  <ImportGroup Label="Shared">
+  </ImportGroup>
+  <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+  <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug_DLL|Win32'" Label="PropertySheets">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+  <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+  <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release_DLL|Win32'" Label="PropertySheets">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+  <PropertyGroup Label="UserMacros" />
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+    <LinkIncremental>true</LinkIncremental>
+    <OutDir>$(SolutionDir)Bin\$(PlatformShortName)\$(Configuration)\</OutDir>
+    <IntDir>$(SolutionDir)Bin\$(PlatformShortName)\$(Configuration)\</IntDir>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug_DLL|Win32'">
+    <LinkIncremental>true</LinkIncremental>
+    <OutDir>$(SolutionDir)Bin\$(PlatformShortName)\$(Configuration)\</OutDir>
+    <IntDir>$(SolutionDir)Bin\$(PlatformShortName)\$(Configuration)\</IntDir>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+    <LinkIncremental>false</LinkIncremental>
+    <OutDir>$(SolutionDir)Bin\$(PlatformShortName)\$(Configuration)\</OutDir>
+    <IntDir>$(SolutionDir)Bin\$(PlatformShortName)\$(Configuration)\</IntDir>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release_DLL|Win32'">
+    <LinkIncremental>false</LinkIncremental>
+    <OutDir>$(SolutionDir)Bin\$(PlatformShortName)\$(Configuration)\</OutDir>
+    <IntDir>$(SolutionDir)Bin\$(PlatformShortName)\$(Configuration)\</IntDir>
+  </PropertyGroup>
+  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+    <ClCompile>
+      <WarningLevel>Level3</WarningLevel>
+      <SDLCheck>true</SDLCheck>
+      <PreprocessorDefinitions>WIN32;_DEBUG;WGC_EXPORTS;_WINDOWS;_USRDLL;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+      <ConformanceMode>true</ConformanceMode>
+      <PrecompiledHeader>Use</PrecompiledHeader>
+      <PrecompiledHeaderFile>pch.h</PrecompiledHeaderFile>
+      <LanguageStandard>stdcpp17</LanguageStandard>
+      <AdditionalOptions>/Zc:twoPhase- %(AdditionalOptions)</AdditionalOptions>
+    </ClCompile>
+    <Link>
+      <SubSystem>Windows</SubSystem>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+      <EnableUAC>false</EnableUAC>
+      <AdditionalDependencies>windowsapp.lib;dwmapi.lib;%(AdditionalDependencies)</AdditionalDependencies>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug_DLL|Win32'">
+    <ClCompile>
+      <WarningLevel>Level3</WarningLevel>
+      <SDLCheck>true</SDLCheck>
+      <PreprocessorDefinitions>WIN32;_DEBUG;WGC_EXPORTS;_WINDOWS;_USRDLL;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+      <ConformanceMode>true</ConformanceMode>
+      <PrecompiledHeader>Use</PrecompiledHeader>
+      <PrecompiledHeaderFile>pch.h</PrecompiledHeaderFile>
+      <LanguageStandard>stdcpp17</LanguageStandard>
+      <AdditionalOptions>/Zc:twoPhase- %(AdditionalOptions)</AdditionalOptions>
+    </ClCompile>
+    <Link>
+      <SubSystem>Windows</SubSystem>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+      <EnableUAC>false</EnableUAC>
+      <AdditionalDependencies>windowsapp.lib;dwmapi.lib;%(AdditionalDependencies)</AdditionalDependencies>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+    <ClCompile>
+      <WarningLevel>Level3</WarningLevel>
+      <FunctionLevelLinking>true</FunctionLevelLinking>
+      <IntrinsicFunctions>true</IntrinsicFunctions>
+      <SDLCheck>true</SDLCheck>
+      <PreprocessorDefinitions>WIN32;NDEBUG;WGC_EXPORTS;_WINDOWS;_USRDLL;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+      <ConformanceMode>true</ConformanceMode>
+      <PrecompiledHeader>Use</PrecompiledHeader>
+      <PrecompiledHeaderFile>pch.h</PrecompiledHeaderFile>
+      <LanguageStandard>stdcpp17</LanguageStandard>
+      <AdditionalOptions>/Zc:twoPhase- %(AdditionalOptions)</AdditionalOptions>
+    </ClCompile>
+    <Link>
+      <SubSystem>Windows</SubSystem>
+      <EnableCOMDATFolding>true</EnableCOMDATFolding>
+      <OptimizeReferences>true</OptimizeReferences>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+      <EnableUAC>false</EnableUAC>
+      <AdditionalDependencies>windowsapp.lib;dwmapi.lib;%(AdditionalDependencies)</AdditionalDependencies>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release_DLL|Win32'">
+    <ClCompile>
+      <WarningLevel>Level3</WarningLevel>
+      <FunctionLevelLinking>true</FunctionLevelLinking>
+      <IntrinsicFunctions>true</IntrinsicFunctions>
+      <SDLCheck>true</SDLCheck>
+      <PreprocessorDefinitions>WIN32;NDEBUG;WGC_EXPORTS;_WINDOWS;_USRDLL;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+      <ConformanceMode>true</ConformanceMode>
+      <PrecompiledHeader>Use</PrecompiledHeader>
+      <PrecompiledHeaderFile>pch.h</PrecompiledHeaderFile>
+      <LanguageStandard>stdcpp17</LanguageStandard>
+      <AdditionalOptions>/Zc:twoPhase- %(AdditionalOptions)</AdditionalOptions>
+    </ClCompile>
+    <Link>
+      <SubSystem>Windows</SubSystem>
+      <EnableCOMDATFolding>true</EnableCOMDATFolding>
+      <OptimizeReferences>true</OptimizeReferences>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+      <EnableUAC>false</EnableUAC>
+      <AdditionalDependencies>windowsapp.lib;dwmapi.lib;%(AdditionalDependencies)</AdditionalDependencies>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemGroup>
+    <ClInclude Include="export.h" />
+    <ClInclude Include="pch.h" />
+    <ClInclude Include="wgc_session_impl.h" />
+  </ItemGroup>
+  <ItemGroup>
+    <ClCompile Include="dllmain.cpp" />
+    <ClCompile Include="export.cpp" />
+    <ClCompile Include="pch.cpp">
+      <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">Create</PrecompiledHeader>
+      <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug_DLL|Win32'">Create</PrecompiledHeader>
+      <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">Create</PrecompiledHeader>
+      <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release_DLL|Win32'">Create</PrecompiledHeader>
+    </ClCompile>
+    <ClCompile Include="wgc_session_impl.cpp" />
+  </ItemGroup>
+  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+  <ImportGroup Label="ExtensionTargets">
+  </ImportGroup>
+</Project>

+ 42 - 0
libs/Recorder/WGC/WGC.vcxproj.filters

@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <ItemGroup>
+    <Filter Include="Source Files">
+      <UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier>
+      <Extensions>cpp;c;cc;cxx;c++;cppm;ixx;def;odl;idl;hpj;bat;asm;asmx</Extensions>
+    </Filter>
+    <Filter Include="Header Files">
+      <UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier>
+      <Extensions>h;hh;hpp;hxx;h++;hm;inl;inc;ipp;xsd</Extensions>
+    </Filter>
+    <Filter Include="Resource Files">
+      <UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
+      <Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
+    </Filter>
+  </ItemGroup>
+  <ItemGroup>
+    <ClInclude Include="pch.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
+    <ClInclude Include="export.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
+    <ClInclude Include="wgc_session_impl.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
+  </ItemGroup>
+  <ItemGroup>
+    <ClCompile Include="dllmain.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="pch.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="export.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="wgc_session_impl.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+  </ItemGroup>
+</Project>

+ 4 - 0
libs/Recorder/WGC/WGC.vcxproj.user

@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="Current" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <PropertyGroup />
+</Project>

+ 1 - 0
libs/libs.pri

@@ -10,3 +10,4 @@ include($$PWD/utils/utils.pri)
 
 
 include($$PWD/Recorder/Recorder.pri)
+include($$PWD/AVPlayer/AvPlayer.pri)

+ 24 - 135
main.cpp

@@ -1,141 +1,30 @@
-// #include "AVPlayer2/mainwindowa.h"
-
 #include <QApplication>
-#include <QDateTime>
 #include <QDebug>
-#include <QFile>
-#include <QLoggingCategory>
-#include <QMutex>
-#include <QTextStream>
-#include <QVBoxLayout>
-#include <qendian.h>
-#include <qmath.h>
-
-#include <AvPlayer2/PlayWidget.h>
-
-#include "mainwindow.h"
-#include "thememanager.h"
-#include "themesettingswidget.h"
-
-#include "tlogger.h"
-
-#include "ui/av_recorder.h"
-
-namespace avrecorder::video { void InitWinRTCapture(); }
-
-#undef ERROR
-// Qt输出(用于兼容现有Qt日志系统)
-
-namespace TC {
-class QtOutput : public LogOutput
-{
-public:
-    void write(LogLevel level, const std::string &message) override;
-};
-
-void QtOutput::write(LogLevel level, const std::string &message)
-{
-    const QString qmsg = QString::fromUtf8(message.c_str());
-    switch (level) {
-    case LogLevel::DEBUG:
-        qDebug() << qmsg;
-        break;
-    case LogLevel::INFO:
-        qInfo() << qmsg;
-        break;
-    case LogLevel::WARNING:
-        qWarning() << qmsg;
-        break;
-    case LogLevel::ERROR:
-        qCritical() << qmsg;
-        break;
-    }
-}
-} // namespace TC
+#include "AVPlayer/avplayerwidget.h"
+#include "AVPlayer/vframe.h"
+#include "widgets/recorderwidget.h"
 
 int main(int argc, char *argv[])
 {
-    // "player.controller.ReadThread=false\n"
-    QLoggingCategory::setFilterRules(QStringLiteral("player.controller.AudioPlayThread=false\n"
-                                                    "player.controller.AudioDecodeThread=false\n"));
-    // QLoggingCategory::setFilterRules("*.debug=false\n"
-    //                                  "*.info=false\n"
-    //                                  "*.warning=false\n"
-    //                                  "*.critical=false\n"
-    //                                  "player.controller.*.debug=true\n"
-    //                                  "player.controller.*.info=true\n");
-
-    // 安装日志处理器
-    //qInstallMessageHandler(myMessageHandler);
-
-    // std::freopen(nullptr, "w", stdout);
-    setvbuf(stdout, nullptr, _IONBF, 0);
-
-#if QT_VERSION >= QT_VERSION_CHECK(5, 14, 0)
-    QGuiApplication::setHighDpiScaleFactorRoundingPolicy(
-        Qt::HighDpiScaleFactorRoundingPolicy::PassThrough);
-#endif
-#if QT_VERSION < QT_VERSION_CHECK(6, 0, 0)
-    QCoreApplication::setAttribute(Qt::AA_EnableHighDpiScaling);
-    QCoreApplication::setAttribute(Qt::AA_UseHighDpiPixmaps);
-#endif
-
-    QCoreApplication::setAttribute(Qt::AA_DontCreateNativeWidgetSiblings);
-
-    QApplication a(argc, argv);
-
-    qRegisterMetaType<AVFrame *>("AVFrame*");
-
-    ThemeManager::instance().setThemeMode(ThemeManager::Light);
-
-    // 注册Room 相关的类型 方便 序列化
-    void initRoomType();
-    initRoomType();
-
-    // 初始化wgc
-
-    avrecorder::video::InitWinRTCapture();
-
-    /*
-docker run -itd  --name zlmediakit --restart=always
--p 1935:1935 -p 8080:80 -p 8443:443
--p 8554:554 -p 10000:10000
--p 10000:10000/udp -p 8000:8000/udp
--p 9000:9000/udp
--v /data/zlmediakit/media/bin:/opt/media/bin
--v /data/zlmediakit/media/conf:/opt/media/conf
-zlmediakit/zlmediakit:master
-*/
-    MainWindow w;
-    w.show();
-
-    // PlayWidget playWidget;
-    // playWidget.resize(960, 540);
-    // playWidget.show();
-
-    // for (int var = 0; var < 20; ++var) {
-    //     playWidget.startToPlay("C:/Users/zhuizhu/Videos/2.mp4");
-    // }
-
-    // AvRecorder avRecorder;
-    // avRecorder.show();
-    // ThemeSettingsWidget ThemeSettingsWidget;
-    // ThemeSettingsWidget.show();
-
-    // PlayerWindow w;
-    // w.resize(960, 540);
-    // w.show();
-
-    // MainWindowA aa;
-    // aa.show();
-
-    // // 这里填你的流地址
-    // // w.startPlay("http://vd3.bdstatic.com/mda-jennyc5ci1ugrxzi/mda-jennyc5ci1ugrxzi.mp4");
-
-    // w.open("C:/Users/zhuizhu/Videos/1.mp4");
-    // // w.startPlay("rtmp://192.168.3.76:1935/stream/V1/stream");
-
-    int ret = a.exec();
-
-    return ret;
+    QApplication app(argc, argv);
+    qRegisterMetaType<QSharedPointer<VideoFrame>>("QSharedPointer<VideoFrame>");
+    try {
+        qDebug() << "Creating RecorderWidget...";
+        RecorderWidget testRecorder;
+        
+        qDebug() << "Showing RecorderWidget...";
+        testRecorder.show();
+
+        AVPlayerWidget avPlayerWidget;
+        avPlayerWidget.show();
+        qDebug() << "RecorderWidget created and shown successfully!";
+        
+        return app.exec();
+    } catch (const std::exception& e) {
+        qCritical() << "Exception caught:" << e.what();
+        return -1;
+    } catch (...) {
+        qCritical() << "Unknown exception caught";
+        return -1;
+    }
 }

+ 3 - 1
widgets/recorderwidget.cpp

@@ -45,6 +45,7 @@ RecorderWidget::RecorderWidget(QWidget *parent)
     // 设置静态实例指针
     s_instance = this;
     // rtmp://106.55.186.74:1935/stream/V1/0198da41-cdb6-78e3-879d-2ea32d58f73f
+
     // 初始化默认设置
     m_settings.liveUrl = "rtmp://106.55.186.74:1935/stream/V1";
     m_settings.liveName = "0198da41-cdb6-78e3-879d-2ea32d58f73f";
@@ -461,7 +462,8 @@ bool RecorderWidget::startStreaming()
         strcpy_s(m_recorderSetting.a_speaker.name, m_speakerDevices[speakerIndex].name);
         m_recorderSetting.a_speaker.is_default = m_speakerDevices[speakerIndex].is_default;
     }
-    
+
+    qDebug() << m_recorderSetting.output;
     // 异步初始化和启动推流,避免主线程阻塞
     QFuture<void> future = QtConcurrent::run([this]() {
         // 初始化录制器

+ 2 - 2
widgets/recorderwidget.h

@@ -33,7 +33,7 @@ class RecorderWidget : public QWidget
 
 public:
     struct Settings {
-        std::string liveUrl = "rtmp://106.55.186.74:1935/stream/V1";
+        std::string liveUrl = "rtmp://127.0.0.1:1935/stream/V1";
         std::string liveName = "stream";
         std::string outputDir = ".";
         int videoBitRate = 8000000;  // 8Mbps
@@ -144,4 +144,4 @@ private:
     
     // 静态实例指针,用于回调
     static RecorderWidget* s_instance;
-};
+};