// Pull in the FFmpeg headers (C libraries, so wrap them in extern "C" for C++)
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavdevice/avdevice.h>
#include <libavutil/opt.h>   // required for av_opt_set() used below
}

int main() {
    av_register_all();        // register all formats and codecs (deprecated/no-op since FFmpeg 4.0)
    avformat_network_init();  // initialize the network module
    avdevice_register_all();  // register capture devices such as dshow
    // Open the camera with FFmpeg (dshow is Windows-only; the device name must match your camera)
    AVFormatContext* pFormatCtx = avformat_alloc_context();
    AVInputFormat* pInputFmt = av_find_input_format("dshow");
    if (avformat_open_input(&pFormatCtx, "video=Integrated Camera", pInputFmt, nullptr) < 0) {
        return -1;  // camera not found or already in use
    }
    avformat_find_stream_info(pFormatCtx, nullptr);

    // Locate the video stream
    int videoStreamIndex = -1;
    for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStreamIndex = i;
            break;
        }
    }
    if (videoStreamIndex < 0) {
        return -1;  // no video stream found
    }

    // Initialize the video decoder
    AVCodecParameters* pCodecParams = pFormatCtx->streams[videoStreamIndex]->codecpar;
    AVCodec* pCodec = avcodec_find_decoder(pCodecParams->codec_id);
    AVCodecContext* pCodecCtx = avcodec_alloc_context3(pCodec);
    avcodec_parameters_to_context(pCodecCtx, pCodecParams);
    avcodec_open2(pCodecCtx, pCodec, nullptr);

    // Initialize the pixel-format converter (camera format -> YUV420P)
    struct SwsContext* pSwsCtx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
        pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P,
        SWS_BICUBIC, nullptr, nullptr, nullptr);

    // Initialize the H.264 encoder (requires an FFmpeg build with libx264 enabled)
    AVCodec* pEncodec = avcodec_find_encoder_by_name("libx264");
    AVCodecContext* pEncodecCtx = avcodec_alloc_context3(pEncodec);
    pEncodecCtx->width = pCodecCtx->width;
    pEncodecCtx->height = pCodecCtx->height;
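    // Encoder time base of 1/25: pts is counted in ticks of 1/25 of a second (25 fps)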
    pEncodecCtx->time_base = { 1, 25 };
    pEncodecCtx->rc_buffer_size = 4000000;
    pEncodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
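    // x264 private options: "ultrafast" favors encoding speed over compression,
    // and "zerolatency" disables lookahead and B-frames to minimize encoder delay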
    av_opt_set(pEncodecCtx->priv_data, "preset", "ultrafast", 0);
    av_opt_set(pEncodecCtx->priv_data, "tune", "zerolatency", 0);
    avcodec_open2(pEncodecCtx, pEncodec, nullptr);

    // Initialize the output file (raw H.264 elementary stream)
    AVFormatContext* pOutFormatCtx = nullptr;
    avformat_alloc_output_context2(&pOutFormatCtx, nullptr, nullptr, "output.h264");
    AVStream* pOutStream = avformat_new_stream(pOutFormatCtx, nullptr);
    // The output stream must carry the encoder's parameters, not the camera's
    avcodec_parameters_from_context(pOutStream->codecpar, pEncodecCtx);
    pOutStream->time_base = pEncodecCtx->time_base;
    avio_open(&pOutFormatCtx->pb, "output.h264", AVIO_FLAG_WRITE);
    avformat_write_header(pOutFormatCtx, nullptr);

    // Capture, decode, convert pixel format, encode, and write to the file
    AVPacket packet;
    int64_t frameIndex = 0;
    while (av_read_frame(pFormatCtx, &packet) >= 0) {
        if (packet.stream_index == videoStreamIndex) {
            avcodec_send_packet(pCodecCtx, &packet);
            AVFrame* pDecodedFrame = av_frame_alloc();
            int ret = avcodec_receive_frame(pCodecCtx, pDecodedFrame);
            if (ret >= 0) {
                AVFrame* pConvertedFrame = av_frame_alloc();
                pConvertedFrame->format = AV_PIX_FMT_YUV420P;
                pConvertedFrame->width = pCodecCtx->width;
                pConvertedFrame->height = pCodecCtx->height;
                av_frame_get_buffer(pConvertedFrame, 32);
                sws_scale(pSwsCtx, pDecodedFrame->data, pDecodedFrame->linesize, 0, pCodecCtx->height,
                          pConvertedFrame->data, pConvertedFrame->linesize);
                pConvertedFrame->pts = frameIndex++;  // monotonically increasing pts, in 1/25 s ticks
                av_packet_unref(&packet);
                avcodec_send_frame(pEncodecCtx, pConvertedFrame);
                // The encoder may buffer frames internally; only write when a packet is produced
                if (avcodec_receive_packet(pEncodecCtx, &packet) >= 0) {
                    packet.stream_index = pOutStream->index;
                    av_write_frame(pOutFormatCtx, &packet);
                }
                av_frame_free(&pConvertedFrame);
            }
            av_frame_free(&pDecodedFrame);
        }
        av_packet_unref(&packet);
    }
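
    // Drain any frames still buffered in the encoder before writing the trailer.
    // A minimal flush sketch following the send/receive convention: sending nullptr
    // signals end-of-stream, then packets are read until none remain.
    avcodec_send_frame(pEncodecCtx, nullptr);
    AVPacket flushPacket;
    av_init_packet(&flushPacket);
    flushPacket.data = nullptr;
    flushPacket.size = 0;
    while (avcodec_receive_packet(pEncodecCtx, &flushPacket) >= 0) {
        flushPacket.stream_index = pOutStream->index;
        av_write_frame(pOutFormatCtx, &flushPacket);
        av_packet_unref(&flushPacket);
    }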

    // Release resources
    av_write_trailer(pOutFormatCtx);
    avio_closep(&pOutFormatCtx->pb);     // close the output file opened with avio_open()
    avformat_free_context(pOutFormatCtx);
    avcodec_free_context(&pEncodecCtx);  // also closes the codec; avcodec_close() is redundant
    sws_freeContext(pSwsCtx);
    avcodec_free_context(&pCodecCtx);
    avformat_close_input(&pFormatCtx);
    avformat_network_deinit();

    return 0;
}