Qt: using QPainter inside a QOpenGLWidget to draw YUV420P frames decoded by FFmpeg

#ifndef GLWIDGET_H
#define GLWIDGET_H

#include <QOpenGLWidget>
#include <QPainter>
#include <QDebug>
#include <QTimer>
#include <QTime>
#include <QString>
#include <QtDebug>
#include "decoder.h"

// Video display widget: a timer triggers repaints, and each paintEvent pops
// one decoded frame from the attached Decoder's queue and draws it with
// QPainter on top of the QOpenGLWidget surface.
class GLWidget : public QOpenGLWidget
{
    Q_OBJECT
public:
    GLWidget();
    ~GLWidget() override;
    void decoder(Decoder*);            // attach the frame producer (non-owning)
private:
    QTimer *m_t_update = nullptr;      // repaint driver, fires every 10 ms
    Decoder *m_decoder = nullptr;      // frame source; may be null until decoder() is called
    int m_t_num = -1;                  // frames painted since last FPS report; -1 = FPS clock not started yet
    QTime m_t_fps;                     // wall-clock timer used to compute FPS
public slots:
    void uuu();                        // timer slot: schedules a repaint via update()

protected:
    void paintEvent(QPaintEvent *event) override;
};

#endif // GLWIDGET_H
#include "glwidget.h"

// Construct the widget and start a ~100 Hz repaint timer; the actual frame
// consumption happens in paintEvent().
GLWidget::GLWidget()
{
    m_t_update = new QTimer(this);  // parented: Qt deletes it with the widget
    // Pointer-to-member connect is verified at compile time, unlike the
    // string-based SIGNAL()/SLOT() macros which only fail at runtime.
    connect(m_t_update, &QTimer::timeout, this, &GLWidget::uuu);
    m_t_update->start(10);
}

GLWidget::~GLWidget()
{
    // Nothing to do: m_t_update is parented to this widget and is deleted by
    // Qt's object tree; m_decoder is not owned by this class.
}

void GLWidget::uuu()
{
    update();
}

// Attach the decoder whose frame queue paintEvent() will consume.
// The pointer is borrowed, not owned.
void GLWidget::decoder(Decoder* _decoder)
{
    m_decoder = _decoder;
}

// Pop one decoded frame (if any) from the decoder queue, draw it scaled to the
// widget, and report the achieved paint rate roughly once per second.
void GLWidget::paintEvent(QPaintEvent *event)
{
    Q_UNUSED(event);

    // The timer can fire before decoder() has been called; the original code
    // dereferenced m_decoder unconditionally and crashed in that window.
    if (m_decoder != nullptr && m_decoder->m_frames->size() > 0)
    {
        Frame frame = m_decoder->m_frames->front();
        QPainter painter;
        painter.begin(this);
        painter.setRenderHint(QPainter::Antialiasing);
        // Scale to the current widget geometry instead of a hard-coded 1920x1080.
        painter.drawImage(rect(), frame.frame);
        painter.end();
        m_decoder->m_frames->pop();  // consume the frame we just drew
    }

    if (m_t_num == -1) {
        // First paint: just start the clock. The original computed an FPS value
        // here from m_t_num == -1 and a never-started QTime.
        m_t_fps.start();
        m_t_num = 0;
    } else if (m_t_fps.elapsed() > 1000) {
        const qreal fps = m_t_num * 1000.0 / m_t_fps.elapsed();
        // Stream the message instead of feeding a dynamic string to the
        // printf-style qDebug overload (format-string hazard).
        qDebug() << QString("-> FPS -> %1").arg(fps);
        m_t_fps.start();
        m_t_num = 0;
    }
    m_t_num++;
}

#ifndef DECODER_H
#define DECODER_H

#include <QDebug>
#include <QThread>
#include <QRunnable>
#include <QScopedPointer>
#include <QImage>
#include "frames.h"

// FFmpeg C headers need C linkage when included from C++.
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"
}

// Background worker (QRunnable) that demuxes and decodes a video file with
// FFmpeg, converts each frame to an RGB32 QImage and pushes it onto m_frames
// for the UI thread to paint.
class Decoder : public QRunnable
{
public:
    Decoder();
    ~Decoder();
    void run();        // QRunnable entry point: setup + full decode loop
    bool create();     // open input, find video stream, open codec; false on failure
public:
    AVFormatContext *pFormatCtx;   // demuxer context, owned here
    AVCodecContext  *pCodecCtx;    // decoder context, borrowed from stream->codec
    AVCodec     *pCodec;           // selected decoder (owned by FFmpeg)
    int videoindex;                // index of the video stream in pFormatCtx
public:
    QScopedPointer<Frames> m_frames;  // decoded-frame queue shared with the widget
};

#endif // DECODER_H
#include "decoder.h"

// Create the frame queue up front so it exists before run() is scheduled.
Decoder::Decoder()
    : m_frames(new Frames())
{
}

Decoder::~Decoder()
{
    // m_frames is released by QScopedPointer. FFmpeg contexts are torn down at
    // the end of run(); nothing is cleaned up here.
}

bool Decoder::create()
{
    av_register_all();
    avformat_network_init();

    pFormatCtx = avformat_alloc_context();

    if(avformat_open_input(&pFormatCtx, "E:/1111.mp4", nullptr, nullptr) != 0){
    printf("Couldn't open input stream.\n");
    return false;
    }

    if(avformat_find_stream_info(pFormatCtx, nullptr) < 0){
    printf("Couldn't find stream information.\n");
    return false;
    }

    videoindex = -1;
    for(int i = 0; i < pFormatCtx->nb_streams; i++) {
    if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
        videoindex = i;
        break;
    }
    }
    if(videoindex == -1) {
    printf("Didn't find a video stream.\n");
    return false;
    }

    // --------------------------------- //
    AVCodec* currentCodec = nullptr;
    currentCodec = av_codec_next(currentCodec);
    while (currentCodec != nullptr) {
    if (av_codec_is_encoder(currentCodec)) {
        // m_log->debug(QString("Encoder -> %1 %2").arg(currentCodec->name).arg(currentCodec->long_name));
    }
    if (av_codec_is_decoder(currentCodec)) {
        // m_log->debug(QString("Decoder -> %1 %2").arg(currentCodec->name).arg(currentCodec->long_name));
    }
    currentCodec = av_codec_next(currentCodec);
    }
    // --------------------------------- //

    pCodecCtx = pFormatCtx->streams[videoindex]->codec;
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if(pCodec == nullptr) {
    printf("Codec not found.\n");
    return false;
    }

    if(avcodec_open2(pCodecCtx, pCodec, nullptr) < 0) {
    printf("Could not open codec.\n");
    return false;
    }

    // printf("--------------- File Information ----------------\n");
    // av_dump_format(pFormatCtx, 0, "PICT0023.AVI", 0);
    // printf("-------------------------------------------------\n");

    return true;
}

void Decoder::run()
{

    this->create();

    AVFrame *pFrame = av_frame_alloc();
    AVFrame *pFrameYUV = av_frame_alloc();
    AVFrame *pFrameRGB = av_frame_alloc();

    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
    unsigned char *out_buffer = (unsigned char *)av_malloc(numBytes);
    unsigned char *rgb_buffer = (unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height, 1));
    av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
    av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, rgb_buffer, AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height, 1);
    static struct SwsContext *img_convert_ctx_yuv = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, nullptr, nullptr, nullptr);
    static struct SwsContext *img_convert_ctx_rgb = sws_getContext(pCodecCtx->width,pCodecCtx->height, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGB32, SWS_BICUBIC, nullptr, nullptr, nullptr);

    AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));

    int count = 0;
    while (av_read_frame(pFormatCtx, packet) >= 0) {
    if(packet->stream_index == videoindex) {
        for(;;)
        {
        int size = m_frames->size();
        if (size < 10) {
            break;
        }
        QThread::msleep(10);
        }
        // For decoding, call avcodec_send_packet() to give the decoder raw compressed data in an AVPacket.
        int frameFinished = avcodec_send_packet(pCodecCtx, packet);
        while (!frameFinished) {
        // For decoding, call avcodec_receive_frame(). On success, it will return an AVFrame containing uncompressed audio or video data.
        frameFinished = avcodec_receive_frame(pCodecCtx, pFrame);
        if (!frameFinished) {
            // m_log->debug(QString("%1 %2").arg(pFrame->format == AV_PIX_FMT_YUV420P ? "OK" : "NO").arg(count));
            // -----------------------------------------------
            sws_scale(img_convert_ctx_yuv, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
            sws_scale(img_convert_ctx_rgb, pFrameYUV->data, pFrameYUV->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
            QImage img((uchar *)pFrameRGB->data[0], pCodecCtx->width, pCodecCtx->height, QImage::Format_ARGB32);
            m_frames->push(Frame(img.copy(), count));
            count++;
            // -----------------------------------------------
            av_frame_unref(pFrame);
        }
        }
    }
    av_packet_unref(packet);
    }

    av_free(out_buffer);
    av_free(rgb_buffer);
    sws_freeContext(img_convert_ctx_yuv);
    sws_freeContext(img_convert_ctx_rgb);
    av_frame_free(&pFrameYUV);
    av_frame_free(&pFrameRGB);
    av_frame_free(&pFrame);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);

}