
A detailed guide to retrieving video and audio media information with the FFmpeg interfaces

Contents
  • 1. Preface
  • 2. Getting media information by calling ffprobe
    • [1] Getting the size and duration
    • [2] Getting the full media details and parsing them
  • 3. Getting media information with the FFmpeg functions
    • [1] Getting video information
    • [2] Getting detailed video and audio information

1. Preface

When doing audio/video development, you frequently need detailed information about a media file.

For example: the total duration, frame rate, dimensions and bit rate of a video file, or the total duration, frame rate, bit rate and channel count of an audio file. This article presents two functions I have wrapped up; call them directly and they return the media information, so you can copy them into your own project and use them right away.

To obtain the detailed information you can either use ffprobe, or call the FFmpeg functions to open the media file and parse it directly.

Both approaches are demonstrated below: one invokes ffprobe.exe directly, the other calls the FFmpeg functions to open the file and parse the information.

If you go the ffprobe.exe route, you can build the FFmpeg sources and compile ffprobe.exe statically; that makes it convenient to invoke because it does not drag any dependency DLLs along.
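For instance, a minimal static build from the FFmpeg source tree could look roughly like the sketch below (the exact configure flags depend on which external encoders and decoders you want to bake in; this is only a typical example, not the exact command used for the binary shown later):

 ./configure --enable-static --disable-shared --pkg-config-flags="--static" --disable-doc
 make -j8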

The following call to ffprobe.exe prints the detailed media information as JSON:

 ffprobe -v quiet -of json -i D:/123.mp4  -show_streams

Running it produces the output directly in JSON format:

 C:\Users\11266>ffprobe -v quiet -of json -i D:/123.mp4  -show_streams
 {
     "streams": [
         {
             "index": 0,
             "codec_name": "aac",
             "codec_long_name": "AAC (Advanced Audio Coding)",
             "profile": "LC",
             "codec_type": "audio",
             "codec_time_base": "1/88200",
             "codec_tag_string": "mp4a",
             "codec_tag": "0x6134706d",
             "sample_fmt": "fltp",
             "sample_rate": "88200",
             "channels": 2,
             "channel_layout": "stereo",
             "bits_per_sample": 0,
             "r_frame_rate": "0/0",
             "avg_frame_rate": "0/0",
             "time_base": "1/44100",
             "start_pts": 0,
             "start_time": "0.000000",
             "duration_ts": 4141046,
             "duration": "93.901270",
             "bit_rate": "127948",
             "max_bit_rate": "132760",
             "nb_frames": "4045",
             "disposition": {
                 "default": 1,
                 "dub": 0,
                 "original": 0,
                 "comment": 0,
                 "lyrics": 0,
                 "karaoke": 0,
                 "forced": 0,
                 "hearing_impaired": 0,
                 "visual_impaired": 0,
                 "clean_effects": 0,
                 "attached_pic": 0,
                 "timed_thumbnails": 0
             },
             "tags": {
                 "creation_time": "2015-04-30T02:43:22.000000Z",
                 "ljavascriptanguage": "und",
                 "handler_name": "GPAC ISO Audio Handler"
             }
         },
         {
             "index": 1,
             "codec_name": "h364",
             "codec_long_name": "H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10",
             "profile": "Main",
             "codec_type": "video",
             "codec_time_base": "2349/70450",
             "codec_tag_string": "avc1",
             "codec_tag": "0x31637661",
             "width": 1280,
             "height": 720,
             "coded_width": 1280,
             "coded_height": 720,
             "has_b_frames": 0,
             "sample_ASPect_ratio": "1:1",
             "display_aspect_ratio": "16:9",
             "pix_fmt": "yuv420p",
             "level": 51,
             "chroma_location": "left",
             "refs": 1,
             "is_avc": "true",
             "nal_length_size": "4",
             "r_frame_rate": "25/1",
             "avg_frame_rate": "35225/2349",
             "time_base": "1/30000",
             "start_pts": 0,
             "start_time": "0.000000",
             "duration_ts": 2816400,
             "duration": "93.880000",
             "bit_rate": "582474",
             "bits_per_raw_sample": "8",
             "nb_frames": "1409",
             "disposition": {
                 "default": 1,
                 "dub": 0,
                 "original": 0,
                 "comment": 0,
                 "lyrics": 0,
                 "karaoke": 0,
                 "forced": 0,
                 "hearing_impaired": 0,
                 "visual_impaired": 0,
                 "clean_effects": 0,
                 "attached_pic": 0,
                 "timed_thumbnails": 0
             },
             "tags": {
                 "creation_time": "2015-04-30T02:43:23.000000Z",
                 "language": "und",
                 "handler_name": "GPAC ISO Video Handler"
             }
         }
     ]
 }
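The command above only reports per-stream fields. If container-level information such as the overall duration and total bit rate is also wanted, adding -show_format prints a format object alongside the streams:

 ffprobe -v quiet -of json -i D:/123.mp4 -show_streams -show_format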

If all you want is the total duration and the dimensions, the following command is enough:

 C:\Users\11266>ffprobe -i D:/123.mp4
 ffprobe version 4.2.2 Copyright (c) 2007-2019 the FFmpeg developers
   built with gcc 9.2.1 (GCC) 20200122
   configuration: --disable-static --enable-shared --enable-gpl --enable-version3 --enable-sdl2 --enable-fontconfig --enable-gnutls --enable-iconv --enable-libass --enable-libdav1d --enable-libbluray --enable-libfreetype --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenjpeg --enable-libopus --enable-libshine --enable-libsnappy --enable-libsoxr --enable-libtheora --enable-libtwolame --enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxml2 --enable-libzimg --enable-lzma --enable-zlib --enable-gmp --enable-libvidstab --enable-libvorbis --enable-libvo-amrwbenc --enable-libmysofa --enable-libspeex --enable-libxvid --enable-libaom --enable-libmfx --enable-amf --enable-ffnvcodec --enable-cuvid --enable-d3d11va --enable-nvenc --enable-nvdec --enable-dxva2 --enable-avisynth --enable-libopenmpt
   libavutil      56. 31.100 / 56. 31.100
   libavcodec     58. 54.100 / 58. 54.100
   libavformat    58. 29.100 / 58. 29.100
   libavdevice    58.  8.100 / 58.  8.100
   libavfilter     7. 57.100 /  7. 57.100
   libswscale      5.  5.100 /  5.  5.100
   libswresample   3.  5.100 /  3.  5.100
   libpostproc    55.  5.100 / 55.  5.100
 Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'D:/123.mp4':
   Metadata:
     major_brand     : mp42
     minor_version   : 0
     compatible_brands: mp42isom
     creation_time   : 2015-04-30T02:43:22.000000Z
   Duration: 00:01:33.90, start: 0.000000, bitrate: 715 kb/s
     Stream #0:0(und): Audio: aac (LC) (mp4a / 0x6134706D), 88200 Hz, stereo, fltp, 127 kb/s (default)
     Metadata:
       creation_time   : 2015-04-30T02:43:22.000000Z
       handler_name    : GPAC ISO Audio Handler
     Stream #0:1(und): Video: h264 (Main) (avc1 / 0x31637661), yuv420p, 1280x720 [SAR 1:1 DAR 16:9], 582 kb/s, 15 fps, 25 tbr, 30k tbn, 20000k tbc (default)
     Metadata:
       creation_time   : 2015-04-30T02:43:23.000000Z
       handler_name    : GPAC ISO Video Handler
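Parsing that human-readable report is exactly what the Qt function in the next section does, but note that ffprobe can also print just the fields you care about in a plain, easily split format. For example, the following asks only for the width, height and duration of the first video stream (the output would look something like 1280,720,93.880000 for the sample file above):

 ffprobe -v error -select_streams v:0 -show_entries stream=width,height,duration -of csv=p=0 D:/123.mp4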

2. Getting media information by calling ffprobe

The code below uses Qt to invoke the ffprobe executable and parse the media information it prints.

Two wrapper functions are given below; together they parse and return the complete media information.

[1] Getting the size and duration

 //Media information
 struct MEDIA_INFO
 {
     int width;      //width in pixels
     int height;     //height in pixels
     qint64 duration;//total duration in milliseconds
 };
 //Get the size and total duration of a video
 struct MEDIA_INFO GetVideo_SizeInfo(QString file)
 {
     struct MEDIA_INFO info = {0,0,0};
     //Build the ffprobe command line (quote the file path)
     QString cmd = QString("%1 -i \"%2\"").arg(FFPROBE_NAME).arg(file);
     QProcess process;
     process.setProcessChannelMode(QProcess::MergedChannels);
     process.start(cmd);
     process.waitForFinished();
     //qDebug() << "cmd:" << cmd;
     if (process.exitCode() == 0)
     {
         log_printf(QString("Run Success"));
         QString qba = process.readAll();
         QByteArray utf8_str = qba.toUtf8();
         // Match duration
         QRegularExpression reDuration("Duration: (\\d{2}:\\d{2}:\\d{2}\\.\\d{2})");
         QRegularExpressionMatch matchDuration = reDuration.match(utf8_str);
         if (matchDuration.hasMatch()) 
         {
             QString duration = matchDuration.captured(1);
             // "00:06:37.15"
             qDebug() << "视频总时间:" << duration;
             int hour=duration.section(":", 0, 0).toInt();
             int minute = duration.section(":", 1, 1).toInt();
             int second = duration.section(":", 2, 3).section(".",0,0).toInt();
             int ms = duration.section(":", 2, 3).section(".", 1, 1).toInt();
             info.duration= hour * 60 * 60 *1000 + minute * 60 *1000 + second*1000 + ms;
         }
         else 
         {
             qDebug() << "No duration match found.";
         }
         // Match resolution
         QRegularExpression reResolution("\\d{3,4}x\\d{3,4}");
         QRegularExpressionMatch matchResolution = reResolution.match(utf8_str);
         if (matchResolution.hasMatch()) 
         {
             QString resolution = matchResolution.captured(0);
             //qDebug() << "视频尺寸:" << resolution;
             //qDebug() << "视频尺寸--w:" << resolution.section("x", 0, 0);
             //qDebug() << "视频尺寸--h:" << resolution.section("x", 1, 1);
             info.width = resolution.section("x", 0, 0).toInt();
             info.height = resolution.section("x", 1, 1).toInt();
         }
         else 
         {
             qDebug() << "No resolution match found.";
         }
     }
     else
     {
         log_printf(QString("Run ERROR"));
         return info;
     }
     return info;
 }
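A quick usage sketch of the function above (FFPROBE_NAME and log_printf are whatever the surrounding project defines them as; the file path is just an example):

 struct MEDIA_INFO info = GetVideo_SizeInfo("D:/123.mp4");
 qDebug() << "size:" << info.width << "x" << info.height
          << "duration(ms):" << info.duration;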

[2] Getting the full media details and parsing them

 // Structure used to store the parsed result
 struct Stream {
     int index;
     QString codecName;
     QString codecLongName;
     QString profile;
     QString codecType;
     QString codecTimeBase;
     QString codecTagString;
     QString codecTag;
     int width;
     int height;
     int codedWidth;
     int codedHeight;
     bool hasBFrames;
     QString pixFmt;
     int level;
     QString colorRange;
     QString colorSpace;
     QString colorTransfer;
     QString colorPrimaries;
     QString chromaLocation;
     int refs;
     bool isAVC;
     int nalLengthSize;
     QString rFrameRate;
     QString avgFrameRate;
     QString timeBase;
     qint64 startPts;
     QString startTime;
     qint64 durationTs;
     QString duration;
     int bitRate;
     int bitsPerRawSample;
     int nbFrames;
     struct Disposition {
         int defaultValue;
         int dub;
         int original;
         int comment;
         int lyrics;
         int karaoke;
         int forced;
         int hearingImpaired;
         int visualImpaired;
         int cleanEffects;
         int attachedPic;
         int timedThumbnails;
     } disposition;
     struct Tags {
         QString language;
         QString handlerName;
     } tags;
 };
 
 
 
 //Parse the JSON structure that holds the media information
 QVector<Stream> DecodeMediaInfo(QString mediafile)
 {
     QByteArray byte_data = mediafile.toUtf8();
 
     //Fetch the media information (GetMediaInfo is expected to return ffprobe's JSON output)
     QByteArray jsonStr = GetMediaInfo(byte_data.data());
 
 
     // Convert the JSON string into a JSON document object
     QJsonDocument doc = QJsonDocument::fromJson(jsonStr);
 
     // Get the top-level JSON object
     QJsonObject topLevelObj = doc.object();
 
     // Get the "streams" array
     QJsonArray streamsArray = topLevelObj.value("streams").toArray();
 
     // Walk the streams array and convert every element into a Stream structure
     QVector<Stream> streamVec;
     for (const QJsonValue & streamValue : streamsArray) {
         QJsonObject streamObj = streamValue.toObject();
 
         // Create a new Stream instance and fill in its fields
         Stream stream;
         stream.index = streamObj.value("index").toInt();
         stream.codecName = streamObj.value("codec_name").toString();
         stream.codecLongName = streamObj.value("codec_long_name").toString();
         stream.profile = streamObj.value("profile").toString();
         stream.codecType = streamObj.value("codec_type").toString();
         stream.codecTimeBase = streamObj.value("codec_time_base").toString();
         stream.codecTagString = streamObj.value("codec_tag_string").toString();
         stream.codecTag = streamObj.value("codec_tag").toString();
         stream.width = streamObj.value("width").toInt();
         stream.height = streamObj.value("height").toInt();
         stream.codedWidth = streamObj.value("coded_width").toInt();
         stream.codedHeight = streamObj.value("coded_height").toInt();
         stream.hasBFrames = streamObj.value("has_b_frames").toInt() != 0; //has_b_frames is a JSON number
         stream.pixFmt = streamObj.value("pix_fmt").toString();
         stream.level = streamObj.value("level").toInt();
         stream.colorRange = streamObj.value("color_range").toString();
         stream.colorSpace = streamObj.value("color_space").toString();
         stream.colorTransfer = streamObj.value("color_transfer").toString();
         stream.colorPrimaries = streamObj.value("color_primaries").toString();
         stream.chromaLocation = streamObj.value("chroma_location").toString();
         stream.refs = streamObj.value("refs").toInt();
         stream.isAVC = (streamObj.value("is_avc").toString() == "true");  //ffprobe prints this field as a string
         stream.nalLengthSize = streamObj.value("nal_length_size").toString().toInt();
         stream.rFrameRate = streamObj.value("r_frame_rate").toString();
         stream.avgFrameRate = streamObj.value("avg_frame_rate").toString();
         stream.timeBase = streamObj.value("time_base").toString();
         stream.startPts = streamObj.value("start_pts").toVariant().toLongLong();
         stream.startTime = streamObj.value("start_time").toString();
         stream.durationTs = streamObj.value("duration_ts").toVariant().toLongLong();
         stream.duration = streamObj.value("duration").toString();
         stream.bitRate = streamObj.value("bit_rate").toString().toInt();  //bit_rate, bits_per_raw_sample and nb_frames come as strings
         stream.bitsPerRawSample = streamObj.value("bits_per_raw_sample").toString().toInt();
         stream.nbFrames = streamObj.value("nb_frames").toString().toInt();
 
         // Parse the disposition object
         QJsonObject dispositionObj = streamObj.value("disposition").toObject();
         stream.disposition.defaultValue = dispositionObj.value("default").toInt();
         stream.disposition.dub = dispositionObj.value("dub").toInt();
         stream.disposition.original = dispositionObj.value("original").toInt();
         stream.disposition.comment = dispositionObj.value("comment").toInt();
         stream.disposition.lyrics = dispositionObj.value("lyrics").toInt();
         stream.disposition.karaoke = dispositionObj.value("karaoke").toInt();
         stream.disposition.forced = dispositionObj.value("forced").toInt();
         stream.disposition.hearingImpaired = dispositionObj.value("hearing_impaired").toInt();
         stream.disposition.visualImpaired = dispositionObj.value("visual_impaired").toInt();
         stream.disposition.cleanEffects = dispositionObj.value("clean_effects").toInt();
         stream.disposition.attachedPic = dispositionObj.value("attached_pic").toInt();
         stream.disposition.timedThumbnails = dispositionObj.value("timed_thumbnails").toInt();
 
         // Parse the tags object
         QJsonObject tagsObj = streamObj.value("tags").toObject();
         stream.tags.language = tagsObj.value("language").toString();
         stream.tags.handlerName = tagsObj.value("handler_name").toString();
 
         // Append the Stream instance to the vector
         streamVec.append(stream);
     }
 
     // Print the parsed result once all streams have been collected (outside the loop, so each stream is printed only once)
     for (const Stream & stream : streamVec) {
             qDebug() << "Index:" << stream.index
                 << "Codec Name:" << stream.codecName
                 << "Codec Long Name:" << stream.codecLongName
                 << "Profile:" << stream.profile
                 << "Codec Type:" << stream.codecType
                 << "Codec Time Base:" << stream.codecTimeBase
                 << "Codec Tag String:" << stream.codecTagString
                 << "Codec Tag:" << stream.codecTag
                 << "Width:" << stream.width
                 << "Height:" << stream.height
                 << "Coded Width:" << stream.codedWidth
                 << "Coded Height:" << stream.codedHeight
                 << "Has B Frames:" << stream.hasBFrames
                 << "Pixel Format:" << stream.pixFmt
                 << "Level:" << stream.level
                 << "Color Range:" << stream.colorRange
                 << "Color Space:" << stream.colorSpace
                 << "Color Transfer:" << stream.colorTransfer
                 << "Color Primaries:" << stream.colorPrimaries
                 << "Chroma Location:" << stream.chromaLocation
                 << "Refs:" << stream.refs
                 << "Is AVC:" << stream.isAVC
                 << "NAL Length Size:" << stream.nalLengthSize
                 << "R Frame Rate:" << stream.rFrameRate
                 << "Avg Frame Rate:" << stream.avgFrameRate
                 << "Time Base:" << stream.timeBase
                 << "Start PTS:" << stream.startPts
                 << "Start Time:" << stream.startTime
                 << "Duration TS:" << stream.durationTs
                 << "Duration:" << stream.duration
                 << "Bitrate:" << stream.bitRate
                 << "Bits per Raw Sample:" << stream.bitsPerRawSample
                 << "Number of Frames:" << stream.nbFrames
                 << "Disposition Default Value:" << stream.disposition.defaultValue
                 << "Disposition Dub:" << stream.disposition.dub
                 << "Disposition Original:" << stream.disposition.original
                 << "Disposition Comment:" << stream.disposition.comment
                 << "Disposition Lyrics:" << stream.disposition.lyrics
                 << "Disposition Karaoke:" << stream.disposition.karaoke
                 << "Disposition Forced:" << stream.disposition.forced
                 << "Disposition Hearing Impaired:" << stream.disposition.hearingImpaired
                 << "Disposition Visual Impaired:" << stream.disposition.visualImpaired
                 << "Disposition Clean Effects:" << stream.disposition.cleanEffects
                 << "Disposition Attached Pic:" << stream.disposition.attachedPic
                 << "Disposition Timed Thumbnails:" << stream.disposition.timedThumbnails
                 << "Tags Language:" << stream.tags.language
                 << "Tags Handler Name:" << stream.tags.handlerName;
     }
     return streamVec;
 }
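Note that DecodeMediaInfo() calls a GetMediaInfo() helper that is not listed here; it is assumed to run ffprobe with -of json -show_streams (as in the ffprobe command shown in the preface) and return the raw JSON text. A minimal sketch of such a helper, reusing the same FFPROBE_NAME constant as above, might look like this:

 //Assumed helper (not part of the original listing): run ffprobe and return its JSON output
 QByteArray GetMediaInfo(const char *file)
 {
     QStringList args;
     args << "-v" << "quiet" << "-of" << "json" << "-show_streams"
          << "-i" << QString::fromUtf8(file);
     QProcess process;
     process.start(FFPROBE_NAME, args);      //passing an argument list avoids quoting the path by hand
     process.waitForFinished();              //default 30-second timeout
     return process.readAllStandardOutput(); //the JSON text parsed by DecodeMediaInfo()
 }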
 

3. Getting media information with the FFmpeg functions

If it is not convenient to invoke ffprobe.exe from inside your program, you can also call the FFmpeg functions directly to open the video or audio file and parse the media data.

[1] Getting video information

The code is given below:

 #include <libavcodec/avcodec.h>
 #include <libavformat/avformat.h>
 #include <libavutil/dict.h>
 int main()
 {
     AVFormatContext *format_ctx = NULL;
     int ret;
     // Open the video file
     ret = avformat_open_input(&format_ctx, "video.mp4", NULL, NULL);
     if (ret != 0) {
         printf("Failed to open the video file\n");
         return -1;
     }
     // Read detailed information about every stream in the file
     ret = avformat_find_stream_info(format_ctx, NULL);
     if (ret < 0) {
         printf("Failed to get stream information\n");
         avformat_close_input(&format_ctx);
         return -1;
     }
     // Print the details of each stream
     for (unsigned int i = 0; i < format_ctx->nb_streams; i++) {
         AVStream *stream = format_ctx->streams[i];
         AVCodecParameters *params = stream->codecpar;
         AVRational time_base = stream->time_base;
         printf("Stream %u:\n", i);
         printf("  time base: %d/%d\n", time_base.num, time_base.den);
         printf("  codec ID: %d\n", params->codec_id);
         printf("  width: %d\n", params->width);
         printf("  height: %d\n", params->height);
         printf("  frame rate: %d/%d\n", stream->avg_frame_rate.num, stream->avg_frame_rate.den);
     }
     // Dump the container-level metadata
     AVDictionaryEntry *tag = NULL;
     while ((tag = av_dict_get(format_ctx->metadata, "", tag, AV_DICT_IGNORE_SUFFIX))) {
         printf("%s=%s\n", tag->key, tag->value);
     }
     // Close the input file
     avformat_close_input(&format_ctx);
     return 0;
 }

This snippet can be compiled on Linux or Windows; it needs to be linked against the FFmpeg libraries at build time.
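On Linux, for example, the required libraries can typically be resolved through pkg-config (assuming the FFmpeg development packages are installed; demo.c is a placeholder file name):

 gcc demo.c -o demo $(pkg-config --cflags --libs libavformat libavcodec libavutil)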

[2] Getting detailed video and audio information

 #include <stdio.h>
 #include <libavcodec/avcodec.h>   //for avcodec_get_name()
 #include <libavformat/avformat.h>
 int main(int argc, char **argv) {
     AVFormatContext *fmt_ctx = NULL;
     AVDictionaryEntry *tag = NULL;
     // Open the input media file
     if (avformat_open_input(&fmt_ctx, argv[1], NULL, NULL) < 0) {
         fprintf(stderr, "Cannot open input file\n");
         return -1;
     }
     // Read the stream information of the media file
     if (avformat_find_stream_info(fmt_ctx, NULL) < 0) {
         fprintf(stderr, "Cannot find stream information\n");
         avformat_close_input(&fmt_ctx);
         return -1;
     }
     // Print the media file information
     printf("File: %s\n", argv[1]);
     printf("Format: %s\n", fmt_ctx->iformat->name);
     printf("Duration: %lld seconds\n", fmt_ctx->duration / AV_TIME_BASE);
     for (int i = 0; i < fmt_ctx->nb_streams; i++) {
         AVStream *stream = fmt_ctx->streams[i];
         const char *type = "Unknown";
         if (stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
             type = "Video";
             printf("\n%s Stream #%d:\n", type, i);
             printf("Codec: %s\n", avcodec_get_name(stream->codecpar->codec_id));
             printf("Resolution: %dx%d\n", stream->codecpar->width, stream->codecpar->height);
             printf("Frame Rate: %.2f fps\n", av_q2d(stream->avg_frame_rate));
             printf("Bit Rate: %lld kbps\n", stream->codecpar->bit_rate / 1000);
         } else if (stream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
             type = "Audio";
             printf("\n%s Stream #%d:\n", type, i);
             printf("Codec: %s\n", avcodec_get_name(stream->codecpar->codec_id));
             printf("Sample Rate: %d Hz\n", stream->codecpar->sample_rate);
             printf("Channels: %d\n", stream->codecpar->channels);
             printf("Bit Rate: %lld kbps\n", stream->codecpar->bit_rate / 1000);
         }
         // Print the stream's metadata
         while ((tag = av_dict_get(stream->metadata, "", tag, AV_DICT_IGNORE_SUFFIX))) {
             printf("%s=%s\n", tag->key, tag->value);
         }
     }
     // Close the input media file
     avformat_close_input(&fmt_ctx);
     return 0;
 }

How to use it:

  • Save the example code as ffprobe.c.
  • In a terminal, change into the directory containing the file and compile it with:
 gcc -o ffprobe ffprobe.c -lavformat -lavcodec -lavutil
  • Run the following command to print all parameters of a video or audio file:
 ./ffprobe [input_file]

Here [input_file] is the path of the input video or audio file.

For example, the following command prints all parameters of the test.mp4 video file:

 ./ffprobe test.mp4

That concludes this detailed look at retrieving video and audio media information through the FFmpeg interfaces; for more material on the topic, see our other related articles.
