Before studying V4L2 video decoding you should first understand the V4L2 video capture flow; see my other article, v4l2采集视频, which describes V4L2 capture in detail.

        Once the capture flow is understood, the V4L2 video decoding flow is easy to follow. The figure below shows the V4L2 video decoding flow:

[Figure: V4L2 video decoding flow]
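        The original figure is no longer available; in its place, here is a minimal sketch of the setup sequence it depicted, assuming a stateful decoder exposed at /dev/video0 (open_and_configure_decoder is a hypothetical name; error handling is omitted):

#include <fcntl.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int open_and_configure_decoder()
{
    int fd = open("/dev/video0", O_RDWR); // decoder node: an assumption, varies per platform

    /* Bitstream side: compressed H264 on the OUTPUT queue. */
    v4l2_format in = {};
    in.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
    ioctl(fd, VIDIOC_G_FMT, &in);               // read back driver defaults
    in.fmt.pix.pixelformat = V4L2_PIX_FMT_H264; // then adjust and set
    ioctl(fd, VIDIOC_S_FMT, &in);

    /* Decoded side: raw frames on the CAPTURE queue. */
    v4l2_format out = {};
    out.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    ioctl(fd, VIDIOC_G_FMT, &out);
    out.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_YUV420M;
    ioctl(fd, VIDIOC_S_FMT, &out);

    /* What follows in the full program below: VIDIOC_REQBUFS and VIDIOC_QBUF
     * on both queues, VIDIOC_STREAMON on both queues, then a poll()-driven
     * DQBUF/QBUF loop until V4L2_BUF_FLAG_LAST. */
    return fd;
}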

        V4L2 decoding uses two queues, an input queue and an output queue. Each queue runs a continuous cycle of VIDIOC_DQBUF and VIDIOC_QBUF, i.e. dequeuing and enqueuing buffers, as sketched below.
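        For example, a single round trip on the bitstream (input) queue looks roughly like the following sketch. This is an illustration rather than part of the original program: queue_cycle_sketch is a made-up name, and it assumes streaming is already on, slot 0 was allocated with VIDIOC_REQBUFS, and return values go unchecked.

#include <sys/ioctl.h>
#include <linux/videodev2.h>

void queue_cycle_sketch(int fd, char *data, unsigned int size)
{
    v4l2_buffer buf = {};
    buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
    buf.memory = V4L2_MEMORY_USERPTR;
    buf.index = 0;                       // buffer slot obtained via VIDIOC_REQBUFS
    buf.m.userptr = (unsigned long)data; // user-allocated memory
    buf.length = size;
    buf.bytesused = size;

    ioctl(fd, VIDIOC_QBUF, &buf);  // hand the filled buffer to the driver
    ioctl(fd, VIDIOC_DQBUF, &buf); // blocks until the driver has consumed it
}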

        The decoder's input and output queues are created much like a V4L2 capture queue; only the queue type differs. The figure below shows the creation flow for the input and output queues:

[Figure: V4L2 decoder input/output queue creation flow]
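        Since that figure is also unavailable, here is a sketch of the creation step, matching the full program below; only the buffer type differs between the two queues (create_queue_sketch is a hypothetical helper; error handling is omitted):

#include <sys/ioctl.h>
#include <linux/videodev2.h>

unsigned int create_queue_sketch(int fd, v4l2_buf_type type, unsigned int count)
{
    v4l2_requestbuffers req = {};
    req.count = count;
    req.type = type;                  // V4L2_BUF_TYPE_VIDEO_OUTPUT or V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE
    req.memory = V4L2_MEMORY_USERPTR; // buffers live in user-allocated memory
    ioctl(fd, VIDIOC_REQBUFS, &req);
    return req.count;                 // the driver may adjust the count
}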

        Complete code (the code comes from the CSDN post v4l2的实际调用方案_v4l2 案例-CSDN博客; comments were added here to explain the code):

#include <cstdarg>
#include <cstdio>
#include <cstring>
#include <fstream>
#include <iostream>
#include <map>
#include <vector>

#include <fcntl.h>
#include <poll.h>
#include <stdint.h>
#include <unistd.h>
#include <linux/videodev2.h>
#include <sys/ioctl.h>

using namespace std;

/****************************************************************************
 * Exception
 ****************************************************************************/

class Exception :
    public std::exception
{
public:
    Exception(const char *fmt, ...);
    Exception(const std::string &str);
    virtual ~Exception() throw();

    virtual const char *what() const throw();

private:
    char msg[100];
};

Exception::Exception(const char *fmt, ...)
{
    va_list ap;

    va_start(ap, fmt);
    vsnprintf(msg, sizeof(msg), fmt, ap);
    va_end(ap);
}

Exception::Exception(const string &str)
{
    strncpy(msg, str.c_str(), sizeof(msg));
    msg[sizeof(msg) - 1] = '\0'; // strncpy does not null-terminate on truncation
}

Exception::~Exception() throw()
{}

const char *Exception::what() const throw()
{
    return msg;
}

/*****************************************************************************
 * Buffer user pointer
 *****************************************************************************/
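/*
 * BufferUserPtr wraps one buffer slot allocated by VIDIOC_REQBUFS: the
 * v4l2_buffer descriptor, its plane array, and the user-space memory that
 * backs each plane (V4L2_MEMORY_USERPTR).
 */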
class BufferUserPtr
{
public:
    void queryBuffer(int fd, v4l2_buf_type type, uint32_t index);
    void resize();

    void clear();
    void queue(int fd);
    void dequeue(v4l2_buffer &b);

    static void printBuffer(const v4l2_buffer &buf, const char *prefix);

    v4l2_buffer vbuf; // one of the buffers allocated by VIDIOC_REQBUFS, distinguished by vbuf.index
    v4l2_plane vplanes[VIDEO_MAX_PLANES]; // the multi-plane descriptors for vbuf
    vector<char> userptr[VIDEO_MAX_PLANES]; // user-space memory backing the planes
};

void BufferUserPtr::queryBuffer(int fd, v4l2_buf_type type, uint32_t index)
{
    vbuf.type = type;
    vbuf.index = index;
    vbuf.length = VIDEO_MAX_PLANES;
    vbuf.m.planes = vplanes;
    /* Query the kernel for information about this frame buffer. */
    int ret = ioctl(fd, VIDIOC_QUERYBUF, &vbuf);
    if (ret != 0)
    {
        throw Exception("Failed to query buffer.");
    }

    printBuffer(vbuf, "Query");
}
void BufferUserPtr::resize()
{
    for (unsigned int i = 0; i < VIDEO_MAX_PLANES; ++i) // at most VIDEO_MAX_PLANES planes; V4L2_PIX_FMT_YUV420M actually uses 3
    {
        userptr[i].clear();
    }
    if (V4L2_TYPE_IS_MULTIPLANAR(vbuf.type)) // V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE: the decoded-data queue
    {
        for (uint32_t i = 0; i < vbuf.length; ++i) // with V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE and V4L2_PIX_FMT_YUV420M the kernel sets vbuf.length to 3: one plane each for Y, U and V
        {
            userptr[i].resize(vbuf.m.planes[i].length); // userptr[] and planes[] correspond one to one by index
        }
    }
    else // V4L2_BUF_TYPE_VIDEO_OUTPUT: the decoder input queue. vbuf.length is 4*1024*1024 (the kernel takes it from sizeimage at VIDIOC_S_FMT time); with V4L2_PIX_FMT_H264 only userptr[0] is used
    {
        userptr[0].resize(vbuf.length);
    }
}

void BufferUserPtr::clear()
{
    // Decoded planar data is stored via v4l2_buffer.m.planes[i] (struct v4l2_plane*); H264 data via v4l2_buffer.m.userptr
    if (V4L2_TYPE_IS_MULTIPLANAR(vbuf.type)) // reset the decoded-data buffer
    {
        for (unsigned int i = 0; i < min(vbuf.length, (uint32_t)VIDEO_MAX_PLANES); ++i) // the kernel adjusts vbuf.length to match the actual format, so take the minimum
        {
            v4l2_plane &plane = vbuf.m.planes[i];

            plane.bytesused = 0;
            plane.m.userptr = (unsigned long)&userptr[i][0];
            plane.data_offset = 0;
        }
    }
    else // reset the H264 buffer
    {
        vbuf.bytesused = 0;
        vbuf.m.userptr = (unsigned long)&userptr[0][0];
    }
}

void BufferUserPtr::queue(int fd)
{
    printBuffer(vbuf, "->");

    int ret = ioctl(fd, VIDIOC_QBUF, &vbuf);
    if (ret != 0)
    {
        throw Exception("Failed to queue buffer.");
    }
}

// 'dequeue' here means saving the driver-filled descriptor b into the member variable vbuf
void BufferUserPtr::dequeue(v4l2_buffer &b)
{
    vbuf = b;

    if (V4L2_TYPE_IS_MULTIPLANAR(vbuf.type))
    {
        vbuf.m.planes = vplanes;
        for (uint32_t i = 0; i < min(vbuf.length, (uint32_t)VIDEO_MAX_PLANES); ++i)
        {
            vbuf.m.planes[i] = b.m.planes[i];
        }
    }

    printBuffer(vbuf, "<-");
}

void BufferUserPtr::printBuffer(const v4l2_buffer &buf, const char *prefix)
{
    cout << prefix << ": " <<
            "type=" << buf.type <<
            ", index=" << buf.index <<
            ", sequence=" << buf.sequence <<
            ", flags=" << hex << buf.flags << dec;

    if (V4L2_TYPE_IS_MULTIPLANAR(buf.type))
    {
        const char *delim;

        cout << ", num_planes=" << buf.length;

        delim = "";
        cout << ", bytesused=[";
        for (unsigned int i = 0; i < buf.length; ++i)
        {
            cout << delim << buf.m.planes[i].bytesused;
            delim = ", ";
        }
        cout << "]";

        delim = "";
        cout << ", length=[";
        for (unsigned int i = 0; i < buf.length; ++i)
        {
            cout << delim << buf.m.planes[i].length;
            delim = ", ";
        }
        cout << "]";

        delim = "";
        cout << ", offset=[";
        for (unsigned int i = 0; i < buf.length; ++i)
        {
            cout << delim << buf.m.planes[i].data_offset;
            delim = ", ";
        }
        cout << "]";

        delim = "";
        cout << ", userptr=[";
        for (unsigned int i = 0; i < buf.length; ++i)
        {
            cout << delim << hex << buf.m.planes[i].m.userptr << dec;
            delim = ", ";
        }
        cout << "]";
    }
    else
    {
        cout << ", bytesused=" << buf.bytesused <<
                ", length=" << buf.length;
    }

    cout << endl;
}

/*****************************************************************************
 * Port
 *****************************************************************************/
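/*
 * Port models one side of the memory-to-memory decoder: either the bitstream
 * (OUTPUT) queue fed from the input file, or the decoded-frame (CAPTURE)
 * queue drained into the output file. 'pending' counts buffers currently
 * owned by the driver.
 */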
class Port
{
public:
    Port(int &fd, v4l2_buf_type type, const char *filename); // fd is passed by reference

    void getSetFormat(uint32_t pixelformat, uint32_t sizeimage = 0);
    void requestBuffers(unsigned int count);
    unsigned int getBufferCount();
    void queueBuffer(BufferUserPtr &buf);
    BufferUserPtr &dequeueBuffer();
    void fillAndQueue(BufferUserPtr &buf);
    void clearAndQueue(BufferUserPtr &buf);
    void dumpBuffer(BufferUserPtr &buf);
    void streamOn();
    void streamOff();

    unsigned int pending;
    vector<BufferUserPtr> buffers;

private:
    v4l2_format getFormat();
    void setFormat(v4l2_format &format);

    int &fd; // reference: guarantees this is the same file descriptor the constructor received
    v4l2_buf_type type;
    fstream file;
};

Port::Port(int &fd, v4l2_buf_type type, const char *filename) :
    pending(0),
    fd(fd),
    type(type)
{
    // The OUTPUT (bitstream) port reads its file; the CAPTURE port writes its file.
    ios::openmode flags = V4L2_TYPE_IS_OUTPUT(type) ? ios::in : ios::out;
    file.open(filename, flags | ios::binary); // binary mode: the streams carry raw H264/YUV data
}
/*
 * pixelformat:
 *   input:  V4L2_PIX_FMT_H264
 *   output: V4L2_PIX_FMT_YUV420M
 */
void Port::getSetFormat(uint32_t pixelformat, uint32_t sizeimage)
{
    v4l2_format format = getFormat(); // fetch the v4l2_format for this queue type. Capturing from a camera just sets parameters with VIDIOC_S_FMT; a decoder first calls VIDIOC_G_FMT to obtain the driver defaults and then VIDIOC_S_FMT to adjust them

    if (V4L2_TYPE_IS_MULTIPLANAR(type)) // type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE: multi-planar, fmt.pix_mp (struct v4l2_pix_format_mplane), the queue that delivers decoded data
    {
        format.fmt.pix_mp.pixelformat = pixelformat; // V4L2_PIX_FMT_YUV420M
    }
    else // type == V4L2_BUF_TYPE_VIDEO_OUTPUT: fmt.pix (struct v4l2_pix_format), the queue that feeds H264 into the decoder
    {
        format.fmt.pix.pixelformat = pixelformat; // V4L2_PIX_FMT_H264
        format.fmt.pix.sizeimage = sizeimage; // maximum buffer size
    }

    setFormat(format);
}

v4l2_format Port::getFormat()
{
    v4l2_format format = {};
    format.type = type;
    int ret = ioctl(fd, VIDIOC_G_FMT, &format);
    if (ret != 0)
    {
        // type is a v4l2_buf_type, so use V4L2_TYPE_IS_OUTPUT to tell the
        // H264 (output) queue apart from the YUV420M (capture) queue.
        if (V4L2_TYPE_IS_OUTPUT(type))
        {
            throw Exception("Failed to get format on the H264 output queue.");
        }
        else
        {
            throw Exception("Failed to get format on the YUV420M capture queue.");
        }
    }

    return format;
}

void Port::setFormat(v4l2_format &format)
{
    int ret = ioctl(fd, VIDIOC_S_FMT, &format);
    if (ret != 0)
    {
        throw Exception("Failed to set format.");
    }
}

void Port::requestBuffers(unsigned int count)
{
    /* Request new buffers to be allocated. */
    v4l2_requestbuffers reqbuf = {}; // zero-initialize: reserved fields must be cleared
    reqbuf.count = count;
    reqbuf.type = type; // decoded output: V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; decoder input: V4L2_BUF_TYPE_VIDEO_OUTPUT
    reqbuf.memory = V4L2_MEMORY_USERPTR; // use memory allocated by the application rather than by the driver
    int ret = ioctl(fd, VIDIOC_REQBUFS, &reqbuf);
    if (ret != 0)
    {
        throw Exception("Failed to request buffers.");
    }

    buffers.resize(reqbuf.count); // allocate reqbuf.count user-space buffer wrappers

    /* Query each buffer and create a new meta buffer. */
    for (uint32_t i = 0; i < reqbuf.count; ++i)
    {
        buffers[i].queryBuffer(fd, type, i);
        buffers[i].resize();
    }
}

unsigned int Port::getBufferCount()
{
    v4l2_control control;
    control.id = V4L2_TYPE_IS_OUTPUT(type) ? V4L2_CID_MIN_BUFFERS_FOR_OUTPUT : V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;

    int ret = ioctl(fd, VIDIOC_G_CTRL, &control);
    if (ret != 0)
    {
        throw Exception("Failed to get minimum buffers.");
    }

    return control.value;
}
// Enqueue
void Port::queueBuffer(BufferUserPtr &buf)
{
    buf.queue(fd);
    ++pending;
}
// Dequeue
BufferUserPtr &Port::dequeueBuffer()
{
    v4l2_buffer vbuf = {};
    v4l2_plane planes[VIDEO_MAX_PLANES] = {};

    vbuf.type = type;
    vbuf.memory = V4L2_MEMORY_USERPTR; // VIDIOC_DQBUF requires type and memory to be set
    vbuf.m.planes = planes;
    vbuf.length = VIDEO_MAX_PLANES;

    int ret = ioctl(fd, VIDIOC_DQBUF, &vbuf);
    if (ret != 0)
    {
        throw Exception("Failed to dequeue buffer. type=%u, memory=%u",
                        vbuf.type, vbuf.memory);
    }

    --pending;
    BufferUserPtr &buf = buffers[vbuf.index];
    buf.dequeue(vbuf);

    return buf;
}
/*
 * For a live stream this read blocks until data arrives, which can prevent
 * already-decoded frames from being drained; adapt this to your actual needs.
 * An empty buffer must not be queued.
 */
void Port::fillAndQueue(BufferUserPtr &buf)
{
    buf.clear();
    file.read((char *)buf.vbuf.m.userptr, buf.vbuf.length); // read H264 NALU data from the input file
    buf.vbuf.bytesused = file.gcount();

    if (file.eof())
    {
        buf.vbuf.flags |= V4L2_BUF_FLAG_LAST;
    }

    if (buf.vbuf.bytesused > 0)
    {
        queueBuffer(buf);
    }
}

void Port::clearAndQueue(BufferUserPtr &buf)
{
    buf.clear();
    queueBuffer(buf);
}

void Port::dumpBuffer(BufferUserPtr &buf)
{
    // The capture format is V4L2_PIX_FMT_YUV420M, so buf.vbuf.length == 3;
    // write each plane to the file in order.
    for (uint32_t i = 0; i < buf.vbuf.length; ++i)
    {
        file.write((char *)buf.vbuf.m.planes[i].m.userptr, buf.vbuf.m.planes[i].bytesused);
    }
}

void Port::streamOn()
{
    int ret = ioctl(fd, VIDIOC_STREAMON, &type);
    if (ret != 0)
    {
        throw Exception("Failed to stream on.");
    }
}

void Port::streamOff()
{
    int ret = ioctl(fd, VIDIOC_STREAMOFF, &type);
    if (ret != 0)
    {
        throw Exception("Failed to stream off.");
    }

    pending = 0;
}

/*****************************************************************************
 * Decoder
 *****************************************************************************/

class Decoder
{
public:
    Decoder(const char *input, const char *output);
    virtual ~Decoder();

    void run();

private:
    void clearAndQueue();

    int fd;
    Port input; // reads H264 data into the buffer queue of type V4L2_BUF_TYPE_VIDEO_OUTPUT
    Port output; // fetches decoded data from the queue of type V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE
};
/*
* V4L2_BUF_TYPE_VIDEO_OUTPUT holds the undecoded ES (bitstream) data.
* V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE holds the decoded data.
*/
Decoder::Decoder(const char *input, const char *output) :
    input(fd, V4L2_BUF_TYPE_VIDEO_OUTPUT, input),
    output(fd, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, output)
{
    fd = open("/dev/video0", O_RDWR); // open the V4L2 device that provides the decoding function
    if (fd < 0)
    {
        throw Exception("Failed to open device.");
    }
    // Enumerate the supported formats; if the list printed below does not
    // include H264, the device cannot decode H264. For a decoder, compressed
    // formats are enumerated on the OUTPUT queue.
    struct v4l2_fmtdesc fmt = {};
    fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
    fmt.index = 0;

    while (ioctl(fd, VIDIOC_ENUM_FMT, &fmt) != -1) {
        printf("Format: %s (FourCC: %c%c%c%c)\n", fmt.description,
               fmt.pixelformat & 0xFF,
               (fmt.pixelformat >> 8) & 0xFF,
               (fmt.pixelformat >> 16) & 0xFF,
               (fmt.pixelformat >> 24) & 0xFF);

        fmt.index++;
    }
}

Decoder::~Decoder()
{
    close(fd);
}

void Decoder::run()
{
    /* Get and set the decoder input/output formats. */
    input.getSetFormat(V4L2_PIX_FMT_H264, 4 * 1024 * 1024); // 4 * 1024 * 1024 is the maximum size of one H264 input buffer
    output.getSetFormat(V4L2_PIX_FMT_YUV420M); // V4L2_PIX_FMT_YUV420M is planar YUV420; the decoded size is computed by the v4l2 framework

    /* Allocate the input and output buffers. */
    input.requestBuffers(6); // 6 input buffers
    output.requestBuffers(6); // 6 decoded-output buffers

    /* Fill the input buffers with H264 data and queue them. */
    for (size_t i = 0; i < input.buffers.size(); ++i)
    {
        input.fillAndQueue(input.buffers[i]);
    }

    /* Reset all decoded-output buffers and queue them. */
    clearAndQueue();

    /* Start decoding. */
    input.streamOn();
    output.streamOn();

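    /*
     * Poll semantics on a memory-to-memory device: POLLOUT means a bitstream
     * (OUTPUT) buffer can be dequeued and refilled, POLLIN means a decoded
     * (CAPTURE) buffer is ready, and POLLPRI signals a V4L2 event.
     */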
    while (true)
    {
        struct pollfd p = {
            .fd = fd, .events = POLLPRI
        };

        if (input.pending > 0) // the input (H264) queue has buffers in flight: watch for the output-ready event
        {
            p.events |= POLLOUT;
        }

        if (output.pending > 0) // the output (decoded YUV) queue has buffers in flight: watch for the input-ready event
        {
            p.events |= POLLIN;
        }

        int ret = poll(&p, 1, 60000); // 60-second timeout

        if (ret < 0)
        {
            throw Exception("Poll returned error code.");
        }

        if (p.revents & POLLERR)
        {
            throw Exception("Poll returned error event.");
        }

        if (ret == 0)
        {
            throw Exception("Poll timed out.");
        }

        if (p.revents & POLLOUT)
        {
            BufferUserPtr &buf = input.dequeueBuffer(); // take back a consumed input buffer
            input.fillAndQueue(buf); // refill it with H264 NALU data and queue it again
        }

        if (p.revents & POLLIN)
        {
            BufferUserPtr &buf = output.dequeueBuffer(); // take a decoded frame off the output queue

            if (buf.vbuf.flags & V4L2_BUF_FLAG_LAST)
            {
                cout << "EOS" << endl;
                break;
            }

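            /*
             * This sample detects a resolution change by an empty capture
             * buffer; the V4L2 stateful-decoder interface also defines
             * V4L2_EVENT_SOURCE_CHANGE for this purpose, and a VIDIOC_G_FMT
             * on the capture queue would normally follow to pick up the new
             * resolution.
             */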
            if (buf.vbuf.m.planes[0].bytesused == 0) // resolution changed
            {
                cout << "Resolution changed." << endl;
                output.streamOff();
                output.requestBuffers(0);
                output.requestBuffers(output.getBufferCount());
                clearAndQueue();
                output.streamOn();
            }
            else
            {
                output.dumpBuffer(buf); // write the frame to the output file
                output.clearAndQueue(buf); // reset the buffer and queue it back
            }
        }
    }
}

void Decoder::clearAndQueue()
{
    /* Clear and queue output buffers. */
    for (size_t i = 0; i < output.buffers.size(); ++i)
    {
        output.clearAndQueue(output.buffers[i]);
    }
}

/*****************************************************************************
 * Main functions
 *****************************************************************************/

static void help(const char *exe)
{
    cout << "Usage: " << exe << " <INPUT> <OUTPUT>" << endl;
    cout << endl;
    cout << "    INPUT   Input H264 stream." << endl;
    cout << "    OUTPUT  Output YUV420 stream." << endl;
}

int main(int argc, char **argv)
{
    if (argc <= 2)
    {
        help(argv[0]);
        return 1;
    }

    Decoder decoder(argv[1], argv[2]);
    decoder.run();

    return 0;
}
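
        To try the sample: compile it with a C++ compiler (for example, something like g++ -o v4l2_decode main.cpp, assuming the file is saved as main.cpp), then run it with an H264 elementary stream as input, e.g. ./v4l2_decode input.h264 output.yuv. The output file holds raw YUV420 planes, which a YUV viewer can play back given the frame width and height.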
