How to play YUV video in Qt4?
I want to play a YUV video sequence using Qt. Right now I draw it onto a QPixmap pixel by pixel, but it can't play the video in real time. What can I do to improve the speed?
Did you try using the Phonon classes like VideoPlayer?
Take a look at this:
http://doc.qt.io/archives/4.6/phonon-videoplayer.html
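As a rough sketch based on the linked VideoPlayer documentation (with QT += phonon in the .pro file): note that Phonon plays encoded files through the platform backend, so a raw .yuv sequence would first have to be wrapped in a container it understands. The file name below is a placeholder.

// Minimal sketch following the Phonon::VideoPlayer docs; "video.avi" is a placeholder.
#include <QApplication>
#include <phonon/videoplayer.h>
#include <phonon/mediasource.h>

int main(int argc, char* argv[])
{
    QApplication app(argc, argv);

    // A widget that embeds the media object, audio and video outputs in one class
    Phonon::VideoPlayer player(Phonon::VideoCategory);
    player.show();
    player.play(Phonon::MediaSource("video.avi"));

    return app.exec();
}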
Pixel by pixel is about the slowest way to build a picture. It would improve performance a lot if you processed the image data beforehand and handed the whole frame to QPixmap in one call, e.g. via loadFromData().
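For raw frames, a minimal sketch of that idea (assuming planar YUV 4:2:0 input; the helper name is made up) could look like this. Since QPixmap::loadFromData() expects encoded image data such as PNG or JPEG, building a QImage and converting it with QPixmap::fromImage() is the more direct route for raw pixels:

// Sketch: convert a whole I420 frame to RGB in memory, then display it in one call.
#include <QImage>
#include <QColor>
#include <QtGlobal>

QImage yuv420pToImage(const uchar* yuv, int width, int height)
{
    const uchar* yPlane = yuv;
    const uchar* uPlane = yPlane + width * height;
    const uchar* vPlane = uPlane + (width / 2) * (height / 2);

    QImage image(width, height, QImage::Format_RGB32);
    for (int row = 0; row < height; ++row) {
        QRgb* out = reinterpret_cast<QRgb*>(image.scanLine(row));
        for (int col = 0; col < width; ++col) {
            const int y = yPlane[row * width + col];
            const int u = uPlane[(row / 2) * (width / 2) + col / 2] - 128;
            const int v = vPlane[(row / 2) * (width / 2) + col / 2] - 128;
            // BT.601 integer approximation of the YUV -> RGB conversion
            const int c = y - 16;
            const int r = (298 * c + 409 * v + 128) >> 8;
            const int g = (298 * c - 100 * u - 208 * v + 128) >> 8;
            const int b = (298 * c + 516 * u + 128) >> 8;
            out[col] = qRgb(qBound(0, r, 255), qBound(0, g, 255), qBound(0, b, 255));
        }
    }
    return image;
}

// Display, e.g. on a QLabel: label->setPixmap(QPixmap::fromImage(yuv420pToImage(buf, w, h)));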
Well, drawing pixel by pixel is definitely the worst-performing solution.
Nowadays (Qt 5), QOpenGLWidget can be used to render video frames to a texture. Depending on the video pixel format, this is either plain texture rendering or a pixel format conversion in the shaders followed by texture drawing.
The question is old, so I'll leave a rough solution here, simply because it took me some time to arrive at it myself once. The simplest (not the best, since plenty of optimizations are possible) solution is:
OpenGLDisplayRGB.h
#pragma once
#include <QOpenGLWidget>
#include <QOpenGLFunctions>
#include <QScopedPointer>
#include <QException>
/*!
 * \brief The OpenGLDisplayRGB class
 * A simple OpenGL display widget that renders an RGBA frame to a texture.
 */
class OpenGLDisplayRGB : public QOpenGLWidget, public QOpenGLFunctions
{
    Q_OBJECT
public:
    explicit OpenGLDisplayRGB(QWidget* parent = nullptr);
    ~OpenGLDisplayRGB() override;

protected:
    void initializeGL() override;
    void resizeGL(int w, int h) override;
    void paintGL() override;
    void closeEvent(QCloseEvent* e) override;

public:
    void DisplayVideoFrame(unsigned char* data, int frameWidth, int frameHeight);

    Q_SIGNAL void closed();

private:
    struct OpenGLDisplayRGBImpl;
    QScopedPointer<OpenGLDisplayRGBImpl> impl;
};
OpenGLDisplayRGB.cpp
#include "OpenGLDisplayRGB.h"
#include <QOpenGLShader>
#include <QOpenGLTexture>
#include <QCoreApplication>
#include <QResizeEvent>
#include <QTimer>
#include <QDebug>
#define ATTRIB_VERTEX 0
#define ATTRIB_TEXTURE 1
namespace
{
    // Vertex coordinates of a full-screen quad (drawn as a triangle strip)
    static const GLfloat vertexVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };

    // Texture coordinates (flipped vertically so the frame is not drawn upside down)
    static const GLfloat textureVertices[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };
}
struct OpenGLDisplayRGB::OpenGLDisplayRGBImpl
{
    OpenGLDisplayRGBImpl(QObject* ownerPtr)
        : mBufRGB(nullptr)
        //, mRepaintTimer(new QTimer(ownerPtr))
        , mEnabled(true)
        , mShaderProgram(new QOpenGLShaderProgram(ownerPtr))
        , mTexture(new QOpenGLTexture(QOpenGLTexture::Target2D))
    { }

    unsigned char*                 mBufRGB;
    //QTimer*                      mRepaintTimer;
    bool                           mEnabled;
    // Not used below: the shaders are added to the program directly from source files
    QOpenGLShader*                 mVShader;
    QOpenGLShader*                 mFShader;
    QOpenGLShaderProgram*          mShaderProgram;
    QScopedPointer<QOpenGLTexture> mTexture;
    int                            mTextureUniform;
    GLsizei                        mVideoW, mVideoH;
};
/*************************************************************************/
OpenGLDisplayRGB::OpenGLDisplayRGB(QWidget* parent)
    : QOpenGLWidget(parent)
    , impl(new OpenGLDisplayRGBImpl(this))
{
    setAttribute(Qt::WA_OpaquePaintEvent);
//  setAttribute(Qt::WA_PaintOnScreen);
    setAttribute(Qt::WA_NoSystemBackground);

    /*
    impl->mRepaintTimer->setInterval(50);
    connect(impl->mRepaintTimer, SIGNAL(timeout()), this, SLOT(update()));
    impl->mRepaintTimer->start();
    */
}
OpenGLDisplayRGB::~OpenGLDisplayRGB()
{
    // Make the GL context current so the texture owned by impl can be released safely
    makeCurrent();
}
void OpenGLDisplayRGB::DisplayVideoFrame(unsigned char* data, int frameWidth, int frameHeight)
{
    // The widget does not copy the frame: 'data' must stay valid until paintGL() has run
    impl->mVideoW = frameWidth;
    impl->mVideoH = frameHeight;
    impl->mBufRGB = data;
    update();
}
void OpenGLDisplayRGB::initializeGL()
{
    initializeOpenGLFunctions();
    glEnable(GL_DEPTH_TEST);

    /* The modern OpenGL rendering pipeline relies on shaders to handle incoming data.
     * A shader is a small program written in the OpenGL Shading Language (GLSL),
     * compiled and executed on the GPU. */
    impl->mEnabled = impl->mShaderProgram->addShaderFromSourceFile(QOpenGLShader::Vertex, ":/OpenGL/simple_vertex_shader.v.glsl");
    if (!impl->mEnabled)
        qDebug() << QString("[Error] Vertex shader failed: %1").arg(impl->mShaderProgram->log());

    impl->mEnabled = impl->mEnabled && impl->mShaderProgram->addShaderFromSourceFile(QOpenGLShader::Fragment, ":/OpenGL/simple_texture_shader.f.glsl");
    if (!impl->mEnabled)
        qDebug() << QString("[Error] Fragment shader failed: %1").arg(impl->mShaderProgram->log());

    // Bind the attribute "vertexIn" (declared in the vertex shader) to location ATTRIB_VERTEX
    impl->mShaderProgram->bindAttributeLocation("vertexIn", ATTRIB_VERTEX);
    // Bind the attribute "textureIn" (declared in the vertex shader) to location ATTRIB_TEXTURE
    impl->mShaderProgram->bindAttributeLocation("textureIn", ATTRIB_TEXTURE);

    // Link the shaders added to the program and make it the active program
    impl->mShaderProgram->link();
    impl->mShaderProgram->bind();

    // Look up the location of the sampler uniform "uSampler" declared in the fragment shader
    impl->mTextureUniform = impl->mShaderProgram->uniformLocation("uSampler");

    // Point ATTRIB_VERTEX at the vertex coordinate array and describe its format
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, vertexVertices);
    // Point ATTRIB_TEXTURE at the texture coordinate array and describe its format
    glVertexAttribPointer(ATTRIB_TEXTURE, 2, GL_FLOAT, 0, 0, textureVertices);
    // Vertex attribute arrays are disabled by default, so enable both of them
    glEnableVertexAttribArray(ATTRIB_VERTEX);
    glEnableVertexAttribArray(ATTRIB_TEXTURE);

    impl->mTexture->create();
    impl->mTexture->setMinMagFilters(QOpenGLTexture::Linear, QOpenGLTexture::Linear);
    impl->mTexture->setWrapMode(QOpenGLTexture::ClampToEdge);

    glClearColor(1.0f, 0.0f, 1.0f, 1.0f); // background color (magenta, easy to spot)
}
void OpenGLDisplayRGB::resizeGL(int w, int h)
{
    if (h == 0) // prevent division by zero
        h = 1;

    // Set the viewport to cover the whole widget
    glViewport(0, 0, w, h);
}
void OpenGLDisplayRGB::paintGL()
{
    if (!impl->mEnabled || !impl->mBufRGB)
        return; //RET

    // Activate texture unit GL_TEXTURE0 and bind our texture to it
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, impl->mTexture->textureId());

    // Upload the current RGBA frame from mBufRGB into the texture
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, impl->mVideoW, impl->mVideoH, 0, GL_RGBA, GL_UNSIGNED_BYTE, impl->mBufRGB);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    // Tell the sampler uniform which texture unit to read from: the value is the
    // unit index (0 for GL_TEXTURE0, 1 for GL_TEXTURE1, and so on)
    glUniform1i(impl->mTextureUniform, 0);

    // Draw the quad as a triangle strip using the vertex arrays set up in initializeGL()
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
void OpenGLDisplayRGB::closeEvent(QCloseEvent* e)
{
    emit closed();
    e->accept();
}
simple_texture_shader.f.glsl
varying vec2 vTextureCoord;
uniform sampler2D uSampler;

void main(void)
{
    gl_FragColor = texture2D(uSampler, vTextureCoord);
}
simple_vertex_shader.v.glsl
attribute vec4 vertexIn;
attribute vec2 textureIn;
varying vec2 vTextureCoord;

void main(void)
{
    gl_Position = vertexIn;
    vTextureCoord = textureIn;
}
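For completeness, here is a hypothetical usage sketch (not part of the original answer). It assumes the two shader files are compiled into a Qt resource under the ":/OpenGL/" prefix, as referenced in initializeGL(), and that the frames are already RGBA; for YUV input you would either convert on the CPU or adapt the fragment shader to sample the Y, U and V planes. File name, frame size and frame rate are placeholders.

// Hypothetical driver: pull raw RGBA frames from a file on a timer and push them into the widget.
#include <QApplication>
#include <QFile>
#include <QTimer>
#include <QByteArray>
#include "OpenGLDisplayRGB.h"

int main(int argc, char* argv[])
{
    QApplication app(argc, argv);

    const int width = 640, height = 480;                  // placeholder frame size
    const qint64 frameSize = qint64(width) * height * 4;  // RGBA, 4 bytes per pixel

    OpenGLDisplayRGB display;
    display.resize(width, height);
    display.show();

    QFile source("video_rgba.raw");                       // placeholder file name
    if (!source.open(QIODevice::ReadOnly))
        return 1;

    QByteArray frame;
    QTimer timer;
    QObject::connect(&timer, &QTimer::timeout, [&]() {
        frame = source.read(frameSize);
        if (frame.size() < frameSize) {                   // end of file: stop playback
            timer.stop();
            return;
        }
        // The widget keeps only the pointer, so 'frame' must stay alive until paintGL() runs
        display.DisplayVideoFrame(reinterpret_cast<unsigned char*>(frame.data()),
                                  width, height);
    });
    timer.start(40);                                      // ~25 fps

    return app.exec();
}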