This is the 28th day of my participation in the August Text Challenge. More challenges can be found in the August series.

1. Overview

1.1 introduction

In the last article, we covered how to cut, splice, and rotate YUV420P video data, but the disadvantages of that approach are obvious. First, it requires a lot of work and a large amount of code. Second, its fault tolerance is low: it involves a great deal of floating-point calculation, so errors creep into the data copying, and when the frame is rendered the Y and UV components may be wrong, producing artifacts such as a corrupted or deformed picture. Moreover, all the YUV copying and arithmetic is done on the CPU, which is inefficient.

In this article we take a better approach: we use OpenGL vertex and texture coordinates, transforming the coordinates to determine where each video block is drawn. This needs far less code, is more fault-tolerant, and is easier to understand.

1.2 Concept Explanation

1.2.1 Vertex arrays and vertex coordinates

Vertex array is a feature provided by OpenGL 1.1, which specifies the coordinates of data rendering when drawing various graphs, representing a geometric figure. To avoid going off the rails, we’re not going to explain opengL too much. We just need to know how we use vertex arrays for our purposes in this article. In a nutshell, is the vertex array defines an array contains a list of the coordinates, opengl is based on the vertex of the grid map, the geometry is to connect the vertices according to certain rules, then the texture coordinates of four coordinate points, mapping to the vertex, then the opengl texture applied to the four vertices of graph.

As shown in the figure above, Figure 1 is called vertex coordinates, and Figure 2 is called texture coordinates. For the convenience of graphics card calculation, OpengL requires normalization of coordinates, so the range of x and y of vertex coordinates is [-1,1].

1.2.2 Texture arrays and texture coordinates

Firstly, the interval range of texture coordinates is [0,1], corresponding to 4 points of vertex coordinates respectively. Second, considering the coordinates of the computer screen, the top left corner is (0,0), the X-axis is forward, the Y-axis is forward, and the texture coordinates are upside down. Look at the TexTure above. If you flip it up and down so that the Y-axis is downward, it will correspond to the computer coordinates. And more importantly, if you flip it up, its four points will correspond to the vertex coordinates one by one. Therefore, we must make clear the corresponding relationship between vertex coordinates and texture coordinates. When constructing the specific array data, we need to flip the texture array up and down to match the vertex array one by one.

2. Practice

Since our video source is in YUV420P format, we need to render it as YUV. Without further ado, here is the code, which can be copied into a project and used directly, provided VLC is configured and Qt 5.12.0 is used.

#pragma once

#include <cstdint>

// Holds one decoded video frame in planar I420 (YUV 4:2:0) layout:
// a full-resolution Y plane followed by quarter-resolution U and V planes,
// all in one contiguous heap buffer.
class I420Image
{
public:
    // Allocates w*h*3/2 bytes: Y (w*h) + U (w*h/4) + V (w*h/4).
    I420Image(int w, int h)
        : width(w)
        , height(h)
        , data(new uint8_t[w * h * 3 / 2])
    {
    }
    ~I420Image()
    {
        delete[] data;
    }

    // The buffer is uniquely owned; a shallow copy would double-free it,
    // so copying is disabled (the original class was implicitly copyable).
    I420Image(const I420Image&) = delete;
    I420Image& operator=(const I420Image&) = delete;

    int GetWidth() const { return width; }
    int GetHeight() const { return height; }
    // Plane accessors: offsets follow the contiguous Y/U/V layout above.
    uint8_t *GetY() const { return data; }
    uint8_t *GetU() const { return data + width * height; }
    uint8_t *GetV() const { return data + width * height + width * height / 4; }

public:
    int width = 0;
    int height = 0;
    uint8_t *data = nullptr;
};
Copy the code

I420Image is the class that stores video frames after we parse them out of VLC.

#pragma once
#include <QOpenGLWidget>
#include <QOpenGLFunctions>
#include "I420Image.h"

struct libvlc_media_track_info_t;
struct libvlc_media_t;
struct libvlc_instance_t;
struct libvlc_media_player_t;
struct libvlc_event_t;
// Qt widget that plays a video through libVLC and renders the raw I420
// frames with OpenGL shaders (YUV -> RGB conversion done on the GPU).
class vlcOpenglTest : public QOpenGLWidget, public QOpenGLFunctions
{
    Q_OBJECT

public:
    explicit vlcOpenglTest(QWidget *parent = 0);
    ~vlcOpenglTest();
public:
    // Starts playback of the media source configured in the .cpp file.
    void play();

public:
    // libVLC raw-video callbacks; `opaque` is the vlcOpenglTest instance.
    // NOTE(review): these run on libVLC's decoder threads, not the GUI thread.
    static void *lock_cb(void *opaque, void **planes);                        // hand out plane buffers for decoding
    static void unlock_cb(void *opaque, void *picture, void *const *planes);  // frame decoded: swap front/back
    static void display_cb(void *opaque, void *picture);                      // frame due: trigger a repaint
    static unsigned setup_cb(void **opaque, char *chroma,                     // negotiate format, allocate buffers
        unsigned *width, unsigned *height,
        unsigned *pitches,
        unsigned *lines);
    static void cleanup_cb(void *opaque);                                     // release buffers from setup_cb

protected:
    virtual void initializeGL() override;   // one-time GL setup (shaders, textures)
    virtual void paintGL() override;        // uploads the front frame and draws the quad
private:
    void InitShaders();
    I420Image *m_Front;                      // frame currently read by paintGL
    I420Image *m_Back;                       // frame libVLC is decoding into
    GLuint program;                          // linked shader program
    GLuint tex_y, tex_u, tex_v;              // one GL_RED texture per plane
    GLuint sampler_y, sampler_u, sampler_v;  // uniform locations of the three samplers
    libvlc_instance_t* m_vlc;
    libvlc_media_player_t *m_vlcplayer;
};
Copy the code
#include "vlcOpenglTest.h"

#ifdef _WIN32
#include <basetsd.h>
typedef SSIZE_T ssize_t;
#endif

#include <cassert>
#include <cstring>

#include <QOpenGLFunctions_2_0>
#include <QPainter>

#include "vlc/vlc.h"

using namespace std;

// Vertex shader: passes the vertex position through unchanged and forwards
// the texture coordinate to the fragment stage.
static const char *vertexShader = "\
	#version 430 core\n \
	layout(location = 0) in vec4 vertexIn; \
	layout(location = 1) in vec2 textureIn; \
	out vec2 textureOut;  \
	void main(void)\
	{\
		gl_Position = vertexIn;\
		textureOut = textureIn;\
	}";

// Fragment shader: samples the three I420 planes and converts YUV -> RGB
// (BT.601-style coefficients). Fix: `#version 430 core` removed the
// gl_FragColor built-in and the texture2D() function, so the original
// shader failed to compile on a strict core profile — use an explicit
// `out` variable and the generic texture() instead.
static const char *fragmentShader = "\
#version 430 core\n \
in vec2 textureOut;\
out vec4 fragColor;\
uniform sampler2D tex_y;\
uniform sampler2D tex_u;\
uniform sampler2D tex_v;\
void main(void)\
{\
    vec3 yuv;\
    vec3 rgb;\
    yuv.x = texture(tex_y, textureOut).r;\
    yuv.y = texture(tex_u, textureOut).r - 0.5;\
    yuv.z = texture(tex_v, textureOut).r - 0.5;\
    rgb = mat3( 1,       1,         1,\
                0,       -0.39465,  2.03211,\
                1.13983, -0.58060,  0) * yuv;\
    fragColor = vec4(rgb, 1.0);\
}";


// Creates the widget, the libVLC instance and a media player, and wires the
// raw-video callbacks so decoded I420 frames land in our frame buffers.
vlcOpenglTest::vlcOpenglTest(QWidget *parent) :
    QOpenGLWidget(parent),
    // Members are initialized in declaration order regardless of the list
    // order, so list them in declaration order (the original listed the
    // player before the instance). GL handles start at 0 (= "no object").
    m_Front(nullptr),
    m_Back(nullptr),
    program(0),
    tex_y(0), tex_u(0), tex_v(0),
    sampler_y(0), sampler_u(0), sampler_v(0),
    m_vlc(nullptr),
    m_vlcplayer(nullptr)
{
    setGeometry(0, 0, 500, 500);
    m_vlc = libvlc_new(0, nullptr);

    m_vlcplayer = libvlc_media_player_new(m_vlc);
    // The callbacks run on libVLC threads with `this` as the opaque pointer.
    libvlc_video_set_callbacks(m_vlcplayer, lock_cb, unlock_cb, display_cb, this);
    libvlc_video_set_format_callbacks(m_vlcplayer, setup_cb, cleanup_cb);
}
// Stops playback and releases the player before the libVLC instance.
// Fixes the original, which called an undeclared stop() (compile error)
// and never released m_vlcplayer.
vlcOpenglTest::~vlcOpenglTest()
{
    if (m_vlcplayer)
    {
        libvlc_media_player_stop(m_vlcplayer);
        libvlc_media_player_release(m_vlcplayer);
    }
    if (m_vlc)
        libvlc_release(m_vlc);
    // cleanup_cb normally frees the frame buffers and nulls the pointers;
    // delete defensively in case it never ran (delete nullptr is a no-op).
    delete m_Front;
    delete m_Back;
}


// Opens the (hard-coded) media location and starts playback.
void vlcOpenglTest::play()
{
    QString path = "file:///D:\\ky.mp4";
    // MRLs passed to libvlc_media_new_location must be UTF-8, so use
    // toUtf8() rather than the locale-dependent toLocal8Bit().
    libvlc_media_t *pmedia = libvlc_media_new_location(m_vlc, path.toUtf8().constData());
    if (!pmedia)
        return;   // invalid MRL or out of memory — nothing to play

    // Options aimed at network streams; harmless for a local file.
    libvlc_media_add_option(pmedia, ":rtsp-tcp=true");
    libvlc_media_add_option(pmedia, ":network-caching=300");
    libvlc_media_player_set_media(m_vlcplayer, pmedia);
    libvlc_media_player_play(m_vlcplayer);

    // The player keeps its own reference to the media; drop ours.
    libvlc_media_release(pmedia);
}

// libVLC "lock" callback: invoked on the decoder thread before each frame
// is decoded. Hands libVLC the three plane pointers of the back buffer to
// decode into; the returned pointer is the picture handle later passed to
// unlock_cb/display_cb.
// NOTE(review): m_Back is swapped in unlock_cb and m_Front is read in
// paintGL on the GUI thread with no synchronization — confirm whether a
// mutex around the swap/read is required.
void *vlcOpenglTest::lock_cb(void *opaque, void **planes)
{

    vlcOpenglTest *pthis = static_cast<vlcOpenglTest*>(opaque);

    // Y, U, V destinations inside the back buffer's contiguous allocation.
    planes[0] = pthis->m_Back->GetY();
    planes[1] = pthis->m_Back->GetU();
    planes[2] = pthis->m_Back->GetV();

    return pthis->m_Back;
}

void vlcOpenglTest::unlock_cb(void *opaque, void *picture, void * const *planes)
{
    vlcOpenglTest *pthis = static_cast<vlcOpenglTest*>(opaque);

    I420Image* p = pthis->m_Front;
    pthis->m_Front = pthis->m_Back;
    pthis->m_Back = p;
}

void vlcOpenglTest::display_cb(void *opaque, void *picture)
{
    vlcOpenglTest *pthis = static_cast<vlcOpenglTest*>(opaque);
    pthis->update();
}

// libVLC format callback: negotiates the decoded video format and sizes the
// plane buffers. Returns the number of picture buffers (1).
unsigned vlcOpenglTest::setup_cb(void **opaque, char *chroma, unsigned *width, unsigned *height, unsigned *pitches, unsigned *lines)
{
    vlcOpenglTest *pthis = static_cast<vlcOpenglTest*>(*opaque);
    assert(pthis);

    // Force the output chroma to planar I420 — the renderer and the buffer
    // layout below assume it; the original left whatever libVLC proposed.
    memcpy(chroma, "I420", 4);

    // If libVLC renegotiates the format, release the old buffers first
    // instead of leaking them (delete nullptr is a no-op on first call).
    delete pthis->m_Front;
    delete pthis->m_Back;
    pthis->m_Front = new I420Image(*width, *height);
    pthis->m_Back = new I420Image(*width, *height);

    // Y plane: full resolution.
    pitches[0] = *width;
    lines[0] = *height;

    // U/V planes: half resolution in both dimensions (4:2:0 subsampling).
    pitches[1] = pitches[2] = *width / 2;
    lines[1] = lines[2] = *height / 2;

    return 1;   // one picture buffer
}

void vlcOpenglTest::cleanup_cb(void *opaque)
{
    vlcOpenglTest *pthis = static_cast<vlcOpenglTest*>(opaque);
    assert(pthis);
    if (pthis->m_Front)
    {
        delete pthis->m_Front;
        pthis->m_Front = nullptr;
    }
    if (pthis->m_Back)
    {
        delete pthis->m_Back;
        pthis->m_Back = nullptr;
    }
}

// Qt calls this once with the GL context current: resolve the GL entry
// points for this context, then compile/link the shaders, set up the vertex
// and texture-coordinate arrays, and create the Y/U/V textures.
void vlcOpenglTest::initializeGL()
{

    initializeOpenGLFunctions();
    InitShaders();

}


void vlcOpenglTest::paintGL()
{
    // 清除缓冲区
    glClearColor(0.0, 0.0, 0.0, 0.0);
    glClear(GL_COLOR_BUFFER_BIT);

    if (m_Front)
    {

#ifdef QT_NO_DEBUG 
        //release 发布时避免第一帧无数据红屏
        if (*m_Front->GetY() == '\0')
        {
            qDebug() << "data frame  Uninitialized completion  return";
            return;
        }
#endif
        int sourceW = m_Front->GetWidth();
        int sourceH = m_Front->GetHeight();
        int srcLength = 0;
        int desW = sourceW;
        int desH = sourceH;
        
        /*Y*/
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, tex_y);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, desW, desH, 0, GL_RED, GL_UNSIGNED_BYTE, (GLvoid*)m_Front->data);
        glUniform1i(sampler_y, 0);

        /*U*/
        glActiveTexture(GL_TEXTURE1);
        glBindTexture(GL_TEXTURE_2D, tex_u);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, desW / 2, desH / 2, 0, GL_RED, GL_UNSIGNED_BYTE, (GLvoid*)(m_Front->data + desW * desH));
        glUniform1i(sampler_u, 1);

        /*V*/
        glActiveTexture(GL_TEXTURE2);
        glBindTexture(GL_TEXTURE_2D, tex_v);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, desW / 2, desH / 2, 0, GL_RED, GL_UNSIGNED_BYTE, (GLvoid*)(m_Front->data + desW * desH * 5 / 4));
        glUniform1i(sampler_v, 2);
        //整图
        glDrawArrays(GL_TRIANGLE_FAN, 0, 4);

        //四宫格
        //for (int i = 0; i < 4; i++)
        //{
        //    glDrawArrays(GL_TRIANGLE_FAN, 4 * i, 4);
        //}
        glFlush();
    }
}

void vlcOpenglTest::InitShaders()
{
        static const GLfloat vertexVertices[] = 
        {
            //整图
            -1.0f,  -1.0f,
            1.0f,  -1.0f,
            1.0f,  1.0f,
            -1.0f,  1.0f,

            //四宫格
        	// 顶点逆时针旋转
        	//-1.0f,  0.0f,		// 左上角区域
        	// 0.0f,  0.0f,
        	// 0.0f,  1.0f,
        	//-1.0f,  1.0f,

        	//0.2f, 0.2f,			// 右上角区域
        	//1.0f, 0.2f,
        	//1.0f, 1.0f,
        	//0.2f, 1.0f,

        	//-1.0f,  -1.0f,		// 左下角区域
        	// 0.0f,  -1.0f,
        	// 0.0f,  -0.2f,
        	//-1.0f,  -0.2f,

        	//0.2f, -1.0f,		// 右下角区域
        	//1.0f, -1.0f,
        	//1.0f, 0.0f,
        	//0.2f, 0.0f,
        };

        static  const GLfloat textureVertices[] = {
            //整图
            0.0f, 1.0f,
            1.0f, 1.0f,
            1.0f, 0.0f,
            0.0f, 0.0f,

            //四宫格
        	// (Y按照和顶点一样的方式逆时针4个点,从左下角开始),然后1- Y (翻转处理,不然出来的图像是反的)
        	//0.0f, 1 - 0.5f,		// 左上角区域
        	//0.5f, 1 - 0.5f,
        	//0.5f, 1 - 1.0f,
        	//0.0f, 1 - 1.0f,

        	//0.5f,  1 - 0.5f,		// 右上角区域
        	//1.0f,  1 - 0.5f,
        	//1.0f,  1 - 1.0f,
        	//0.5f,  1 - 1.0f,

        	//0.0f, 1 - (0.5f - 0.5f),		// 左下角区域
        	//0.5f, 1 - (0.5f - 0.5f),
        	//0.5f, 1 - (1.0f - 0.5f),
        	//0.0f, 1 - (1.0f - 0.5f),

        	//0.5f,  1 - (0.5f - 0.5f),	// 右下角区域
        	//1.0f,  1 - (0.5f - 0.5f),
        	//1.0f,  1 - (1.0f - 0.5f),
        	//0.5f,  1 - (1.0f - 0.5f),
        };

    GLint vertCompiled, fragCompiled, linked;
    GLint v, f;

    //Shader: step1
    v = glCreateShader(GL_VERTEX_SHADER);
    f = glCreateShader(GL_FRAGMENT_SHADER);

    //Shader: step2
    glShaderSource(v, 1, &vertexShader, NULL);
    glShaderSource(f, 1, &fragmentShader, NULL);

    //Shader: step3
    glCompileShader(v);
    glGetShaderiv(v, GL_COMPILE_STATUS, &vertCompiled);    //Debug

    glCompileShader(f);
    glGetShaderiv(f, GL_COMPILE_STATUS, &fragCompiled);    //Debug

    //Program: Step1
    program = glCreateProgram();
    //Program: Step2
    glAttachShader(program, v);
    glAttachShader(program, f);


    glVertexAttribPointer(0, 2, GL_FLOAT, 0, 0, vertexVertices);
    glEnableVertexAttribArray(0);

    glVertexAttribPointer(1, 2, GL_FLOAT, 0, 0, textureVertices);
    glEnableVertexAttribArray(1);


    //Program: Step3
    glLinkProgram(program);
    //Debug
    glGetProgramiv(program, GL_LINK_STATUS, &linked);

    glUseProgram(program);

    //Get Uniform Variables Location
    sampler_y = glGetUniformLocation(program, "tex_y");
    sampler_u = glGetUniformLocation(program, "tex_u");
    sampler_v = glGetUniformLocation(program, "tex_v");

    //Init Texture
    glGenTextures(1, &tex_y);
    glBindTexture(GL_TEXTURE_2D, tex_y);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glGenTextures(1, &tex_u);
    glBindTexture(GL_TEXTURE_2D, tex_u);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glGenTextures(1, &tex_v);
    glBindTexture(GL_TEXTURE_2D, tex_v);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

}
Copy the code

The code above only shows how to use OpenGL to render a single whole video frame and how to use vertices to split it into a 2x2 grid; a 3x3 grid works the same way, but you need to calculate the vertex coordinates accordingly. In InitShaders, a whole image uses 4 vertices, a 2x2 grid uses 16, and a 3x3 grid uses 36, so in practice this method can be adapted to assign the vertex data dynamically.

For splicing, you map each texture region onto the vertex region where you want it to appear, according to the layout you need; you can also swap display regions. In short, you can arrange the display however you like.


If there is any help, please give more likes to support oh.