Index: Series tutorial index
Previous: FFmpeg4 Tutorial 9: Software decoding and playing video with QOpenGL (YUV420P->OpenGL)
Displaying video in QML cannot be done with the previous approach; after many attempts I finally found a method that works.
Decoding flow chart:

Decoding function call flow chart:

Overall architecture:

The implementation is divided into three parts.
Software decoding
The main flow is the same as before, except that the format-conversion step is gone and a data-filling step is added. Key code:
```cpp
while (av_read_frame(fmtCtx, pkt) >= 0) {
    if (pkt->stream_index == videoStreamIndex) {
        if (avcodec_send_packet(videoCodecCtx, pkt) >= 0) {
            int ret;
            while ((ret = avcodec_receive_frame(videoCodecCtx, yuvFrame)) >= 0) {
                // Copy each decoded plane into the YUVData struct. linesize
                // may be wider than the visible width because of alignment
                // padding, so the full stride is copied and stored alongside
                // the data. For YUV420P the U and V planes are half height.
                m_yuvData.Y = QByteArray((char *)yuvFrame->data[0],
                                         yuvFrame->linesize[0] * yuvFrame->height);
                m_yuvData.U = QByteArray((char *)yuvFrame->data[1],
                                         yuvFrame->linesize[1] * yuvFrame->height / 2);
                m_yuvData.V = QByteArray((char *)yuvFrame->data[2],
                                         yuvFrame->linesize[2] * yuvFrame->height / 2);
                m_yuvData.yLineSize = yuvFrame->linesize[0];
                m_yuvData.uLineSize = yuvFrame->linesize[1];
                m_yuvData.vLineSize = yuvFrame->linesize[2];
                m_yuvData.height = yuvFrame->height;

                // Queue the frame for the render side.
                frameBuffer.append(m_yuvData);

                // Crude frame pacing.
                QThread::msleep(24);
            }
            // avcodec_receive_frame() returns AVERROR(EAGAIN) when the
            // decoder needs more input and AVERROR_EOF at end of stream;
            // anything else below zero is a real decoding error.
            if (ret != AVERROR(EAGAIN) && ret != AVERROR_EOF)
                fprintf(stderr, "Error during decoding\n");
        }
    }
    // Release the packet whether or not it belonged to the video stream.
    av_packet_unref(pkt);
}
```
The loop copies the decoded YUV (YUV420P) planes into a dedicated struct and appends it to the buffer queue.
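For reference, here is a minimal sketch of what the YUVData struct and the frame queue could look like, inferred from the fields used above; the exact definition lives in the repository, and the queue type here is an assumption:

```cpp
#include <QByteArray>
#include <QVector>

// One decoded YUV420P frame. Each plane keeps its own stride
// (linesize) because FFmpeg may pad rows for alignment.
struct YUVData
{
    QByteArray Y, U, V;   // raw plane bytes, stride * plane height each
    int yLineSize = 0;    // stride of the Y plane in bytes
    int uLineSize = 0;    // stride of the U plane
    int vLineSize = 0;    // stride of the V plane
    int height    = 0;    // visible frame height in pixels
};

// Simple frame queue filled by the decode thread and drained by the renderer.
QVector<YUVData> frameBuffer;
```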
OpenGL texture rendering
```cpp
I420Render::I420Render()
{
    // One single-channel texture per plane; nearest filtering is enough
    // because each plane is sampled at its native resolution.
    mTexY = new QOpenGLTexture(QOpenGLTexture::Target2D);
    mTexY->setFormat(QOpenGLTexture::LuminanceFormat);
    mTexY->setMinificationFilter(QOpenGLTexture::Nearest);
    mTexY->setMagnificationFilter(QOpenGLTexture::Nearest);
    mTexY->setWrapMode(QOpenGLTexture::ClampToEdge);

    mTexU = new QOpenGLTexture(QOpenGLTexture::Target2D);
    mTexU->setFormat(QOpenGLTexture::LuminanceFormat);
    mTexU->setMinificationFilter(QOpenGLTexture::Nearest);
    mTexU->setMagnificationFilter(QOpenGLTexture::Nearest);
    mTexU->setWrapMode(QOpenGLTexture::ClampToEdge);

    mTexV = new QOpenGLTexture(QOpenGLTexture::Target2D);
    mTexV->setFormat(QOpenGLTexture::LuminanceFormat);
    mTexV->setMinificationFilter(QOpenGLTexture::Nearest);
    mTexV->setMagnificationFilter(QOpenGLTexture::Nearest);
    mTexV->setWrapMode(QOpenGLTexture::ClampToEdge);
}

void I420Render::init()
{
    initializeOpenGLFunctions();

    const char *vsrc =
        "attribute vec4 vertexIn; \
         attribute vec2 textureIn; \
         varying vec2 textureOut; \
         void main(void) \
         { \
             gl_Position = vertexIn; \
             textureOut = textureIn; \
         }";

    // Sample the three planes and convert YUV to RGB. GLSL mat3
    // constructors are column-major, so this is the usual YUV->RGB
    // conversion matrix.
    const char *fsrc =
        "varying mediump vec2 textureOut;\n"
        "uniform sampler2D textureY;\n"
        "uniform sampler2D textureU;\n"
        "uniform sampler2D textureV;\n"
        "void main(void)\n"
        "{\n"
        "    vec3 yuv;\n"
        "    vec3 rgb;\n"
        "    yuv.x = texture2D(textureY, textureOut).r;\n"
        "    yuv.y = texture2D(textureU, textureOut).r - 0.5;\n"
        "    yuv.z = texture2D(textureV, textureOut).r - 0.5;\n"
        "    rgb = mat3(1,       1,        1,\n"
        "               0,      -0.3455,   1.779,\n"
        "               1.4075, -0.7169,   0) * yuv;\n"
        "    gl_FragColor = vec4(rgb, 1);\n"
        "}\n";

    m_program.addCacheableShaderFromSourceCode(QOpenGLShader::Vertex, vsrc);
    m_program.addCacheableShaderFromSourceCode(QOpenGLShader::Fragment, fsrc);
    m_program.bindAttributeLocation("vertexIn", 0);
    m_program.bindAttributeLocation("textureIn", 1);
    m_program.link();
    m_program.bind();

    // Full-screen quad and its texture coordinates.
    vertices << QVector2D(-1.0f,  1.0f) << QVector2D(1.0f,  1.0f)
             << QVector2D( 1.0f, -1.0f) << QVector2D(-1.0f, -1.0f);
    textures << QVector2D(0.0f, 1.0f) << QVector2D(1.0f, 1.0f)
             << QVector2D(1.0f, 0.0f) << QVector2D(0.0f, 0.0f);
}

void I420Render::updateTextureInfo(int w, int h)
{
    // Y is full resolution; U and V are subsampled by 2 in both directions.
    mTexY->setSize(w, h);
    mTexY->allocateStorage(QOpenGLTexture::Red, QOpenGLTexture::UInt8);

    mTexU->setSize(w / 2, h / 2);
    mTexU->allocateStorage(QOpenGLTexture::Red, QOpenGLTexture::UInt8);

    mTexV->setSize(w / 2, h / 2);
    mTexV->allocateStorage(QOpenGLTexture::Red, QOpenGLTexture::UInt8);

    mTextureAlloced = true;
}

void I420Render::updateTextureData(const YUVData &data)
{
    if (data.Y.size() <= 0 || data.U.size() <= 0 || data.V.size() <= 0)
        return;

    // setRowLength tells OpenGL the real stride of each plane, which may
    // be wider than the visible width because of FFmpeg's row padding.
    QOpenGLPixelTransferOptions options;
    options.setImageHeight(data.height);

    options.setRowLength(data.yLineSize);
    mTexY->setData(QOpenGLTexture::Luminance, QOpenGLTexture::UInt8, data.Y.data(), &options);

    options.setRowLength(data.uLineSize);
    mTexU->setData(QOpenGLTexture::Luminance, QOpenGLTexture::UInt8, data.U.data(), &options);

    options.setRowLength(data.vLineSize);
    mTexV->setData(QOpenGLTexture::Luminance, QOpenGLTexture::UInt8, data.V.data(), &options);
}

void I420Render::paint()
{
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glDisable(GL_DEPTH_TEST);

    if (!mTextureAlloced)
        return;

    m_program.bind();
    m_program.enableAttributeArray("vertexIn");
    m_program.setAttributeArray("vertexIn", vertices.constData());
    m_program.enableAttributeArray("textureIn");
    m_program.setAttributeArray("textureIn", textures.constData());

    // Bind each plane to its own texture unit.
    glActiveTexture(GL_TEXTURE0);
    mTexY->bind();
    glActiveTexture(GL_TEXTURE1);
    mTexU->bind();
    glActiveTexture(GL_TEXTURE2);
    mTexV->bind();

    m_program.setUniformValue("textureY", 0);
    m_program.setUniformValue("textureU", 1);
    m_program.setUniformValue("textureV", 2);

    glDrawArrays(GL_QUADS, 0, 4);

    m_program.disableAttributeArray("vertexIn");
    m_program.disableAttributeArray("textureIn");
    m_program.release();
}
```
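The class declaration is not reproduced in the post. Inferring from the members used above and from the resize() call made later in VideoFboItem, a minimal sketch might look like this; resize()'s body is an assumption:

```cpp
#include <QOpenGLFunctions>
#include <QOpenGLShaderProgram>
#include <QOpenGLTexture>
#include <QVector>
#include <QVector2D>

class I420Render : protected QOpenGLFunctions
{
public:
    I420Render();
    void init();                                  // compile shaders, build geometry
    void resize(int w, int h) { glViewport(0, 0, w, h); }  // assumed body
    void updateTextureInfo(int w, int h);         // (re)allocate the plane textures
    void updateTextureData(const YUVData &data);  // upload one frame's planes
    void paint();                                 // draw the textured quad

private:
    QOpenGLShaderProgram m_program;
    QOpenGLTexture *mTexY = nullptr;
    QOpenGLTexture *mTexU = nullptr;
    QOpenGLTexture *mTexV = nullptr;
    QVector<QVector2D> vertices;   // quad corner positions
    QVector<QVector2D> textures;   // matching texture coordinates
    bool mTextureAlloced = false;
};
```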
QML display
This part needs two classes, VideoItem and VideoFboItem. VideoItem is the interface called from QML, while VideoFboItem is instantiated and driven automatically by VideoItem.
VideoFboItem
```cpp
class VideoFboItem : public QQuickFramebufferObject::Renderer
{
public:
    VideoFboItem()
    {
        m_render.init();
    }

    void render() override
    {
        m_render.paint();
        m_window->resetOpenGLState();
    }

    QOpenGLFramebufferObject *createFramebufferObject(const QSize &size) override
    {
        QOpenGLFramebufferObjectFormat format;
        format.setAttachment(QOpenGLFramebufferObject::CombinedDepthStencil);
        format.setSamples(4);
        m_render.resize(size.width(), size.height());
        return new QOpenGLFramebufferObject(size, format);
    }

    void synchronize(QQuickFramebufferObject *item) override
    {
        VideoItem *pItem = qobject_cast<VideoItem *>(item);
        if (pItem) {
            if (!m_window) {
                m_window = pItem->window();
            }
            if (pItem->infoDirty()) {
                m_render.updateTextureInfo(pItem->videoWidth(), pItem->videoHeght());
                pItem->makeInfoDirty(false);
            }
            ba = pItem->getFrame();
            m_render.updateTextureData(ba);
        }
    }

private:
    I420Render m_render;
    QQuickWindow *m_window = nullptr;
    YUVData ba;
};
```
The three functions render/createFramebufferObject/synchronize are overrides of the base-class virtuals. render() calls the OpenGL renderer's paint() to draw the frame; createFramebufferObject() is essentially boilerplate; synchronize() pulls the information it needs (video width and height, plus the actual frame data) from the VideoItem interface.
All three are invoked automatically through VideoItem.
VideoItem
This class is the interface exposed directly to QML, and it interacts with the VideoFboItem class shown above. The only member the post shows is createRenderer():
```cpp
QQuickFramebufferObject::Renderer *VideoItem::createRenderer() const
{
    return new VideoFboItem;
}
```
After this, VideoItem automatically drives the VideoFboItem callbacks.
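The rest of the VideoItem declaration is not shown in the post. A minimal sketch of the interface that synchronize() relies on could look like the following; the exact signatures are assumptions, and videoHeght is spelled as in the code above:

```cpp
#include <QQuickFramebufferObject>

class VideoItem : public QQuickFramebufferObject
{
    Q_OBJECT
public:
    Renderer *createRenderer() const override;

    // Interface consumed by VideoFboItem::synchronize().
    bool infoDirty() const { return m_infoDirty; }
    void makeInfoDirty(bool dirty) { m_infoDirty = dirty; }
    int videoWidth() const { return m_width; }
    int videoHeght() const { return m_height; }  // spelling as used in synchronize()
    YUVData getFrame();                          // pops the next frame off frameBuffer

    // Interface called from QML (see the Button example below).
    Q_INVOKABLE void setUrl(const QString &url);
    Q_INVOKABLE void start();

private:
    void decodeLoop(const QString &url);  // hypothetical: the decode loop from part one

    bool m_infoDirty = false;
    int m_width = 0, m_height = 0;
    QString m_url;
};
```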
Calling from QML
First, import the video module in the QML file; the type also has to be registered with the QML engine on the C++ side, as sketched below.
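The post does not show the registration call. A typical setup follows; the module name "VideoItems" and version 1.0 are assumptions, and any name works as long as the QML import matches:

```cpp
#include <QtQml>

// Register VideoItem with the QML engine before loading the QML file.
// The QML side must then use the matching line:  import VideoItems 1.0
qmlRegisterType<VideoItem>("VideoItems", 1, 0, "VideoItem");
```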
Then place a video module object in the root component:
```qml
VideoItem {
    id: videoitem
    anchors.fill: parent
}
```
After that, whenever you need to call the video module's interface, just use videoitem:
```qml
Button {
    id: button
    x: 29
    y: 27
    text: qsTr("Play")

    onClicked: {
        videoitem.setUrl("/home/jackey/Videos/Sample.mkv")
        videoitem.start()
    }
}
```
When the button is clicked, it calls videoitem's setUrl/start interface.
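setUrl() and start() are not shown in the post. A plausible sketch, assuming the decode loop from the first section runs on a worker thread (decodeLoop here is a hypothetical wrapper for it) and the item repaints on a timer:

```cpp
#include <QTimer>
#include <QtConcurrent>

void VideoItem::setUrl(const QString &url)
{
    m_url = url;  // remember the media path for the decoder
}

void VideoItem::start()
{
    // Run the decode loop from the first section on a worker thread.
    QtConcurrent::run([this] { decodeLoop(m_url); });

    // Repaint the item at roughly the video frame rate; update()
    // schedules synchronize()/render() on the scene-graph side.
    auto *timer = new QTimer(this);
    connect(timer, &QTimer::timeout, this, &VideoItem::update);
    timer->start(40);
}
```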
Result
The default interface:

After clicking the Play button, the video starts playing:

The above is the default software-decode YUV420P display method on PC; if you need NV12 (YUV420SP) display, the method from the earlier articles still applies.
The source code is on GitHub under 10.video_decode_by_cpu_display_by_qml.
Next: FFmpeg4 Tutorial 11: CUDA hardware decoding and playing video with Qt (YUV420SP->RGB32)