C# 使用FFmpeg.Autogen对byte[]进行编解码

C# 使用FFmpeg.Autogen对byte[]进行编解码,参考:https://github.com/vanjoge/CSharpVideoDemo

入口调用类:

using System;
using System.IO;
using System.Drawing;
using System.Runtime.InteropServices;
using FFmpeg.AutoGen;namespace FFmpegAnalyzer
{
    /// <summary>
    /// Facade that wires together the FFmpeg-based encoder and decoder.
    /// Call <see cref="RegisterFFmpeg"/> once before creating either.
    /// </summary>
    public class FFmpegWrapper
    {
        /// <summary>Default codec used by <see cref="CreateEncoder"/> and <see cref="CreateDecoder"/>.</summary>
        public AVCodecID DefaultCodecFormat { get; set; } = AVCodecID.AV_CODEC_ID_H264;

        // BUG FIX: keeps the managed log-callback delegate alive for the process
        // lifetime. FFmpeg only stores the raw function pointer; when the delegate
        // was held in a local (as in the original), the GC could collect it and the
        // next native log call would crash the process.
        private static av_log_set_callback_callback _logCallback;

        /// <summary>
        /// Locates the native FFmpeg binaries and registers codecs/formats.
        /// </summary>
        public static void RegisterFFmpeg()
        {
            FFmpegBinariesHelper.RegisterFFmpegBinaries();
            // Required on FFmpeg 3.x; deprecated no-ops on FFmpeg 4.0+.
            ffmpeg.av_register_all();
            ffmpeg.avcodec_register_all();
            ffmpeg.avformat_network_init();
        }

        /// <summary>
        /// Routes FFmpeg's log output to the console.
        /// </summary>
        /// <exception cref="NotSupportedException">.NET Framework may not support the callback registration.</exception>
        private unsafe void RegisterFFmpegLogger()
        {
            ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE);
            _logCallback = (p0, level, format, vl) =>
            {
                if (level > ffmpeg.av_log_get_level()) return;
                var lineSize = 1024;
                var lineBuffer = stackalloc byte[lineSize];
                var printPrefix = 1;
                ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
                Console.Write(line);
            };
            ffmpeg.av_log_set_callback(_logCallback);
        }

        #region Encoder

        /// <summary>Creates the encoder.</summary>
        /// <param name="frameSize">Size of one raw input frame.</param>
        /// <param name="isRgb">true for RGB24 input, false for BGRA.</param>
        public void CreateEncoder(Size frameSize, bool isRgb = true)
        {
            _fFmpegEncoder = new FFmpegEncoder(frameSize, isRgb);
            _fFmpegEncoder.CreateEncoder(DefaultCodecFormat);
        }

        /// <summary>Encodes one raw frame.</summary>
        /// <param name="frameBytes">Raw frame data.</param>
        /// <returns>Encoded packet bytes.</returns>
        public byte[] EncodeFrames(byte[] frameBytes)
        {
            return _fFmpegEncoder.EncodeFrames(frameBytes);
        }

        /// <summary>Releases the encoder, if one was created (no-op otherwise).</summary>
        public void DisposeEncoder()
        {
            // BUG FIX: the original threw NullReferenceException when called
            // before CreateEncoder.
            _fFmpegEncoder?.Dispose();
        }

        #endregion

        #region Decoder

        /// <summary>Creates the decoder.</summary>
        /// <param name="decodedFrameSize">Size of one decoded output frame.</param>
        /// <param name="isRgb">true for RGB24 output, false for BGRA.</param>
        public void CreateDecoder(Size decodedFrameSize, bool isRgb = true)
        {
            _fFmpegDecoder = new FFmpegDecoder(decodedFrameSize, isRgb);
            _fFmpegDecoder.CreateDecoder(DefaultCodecFormat);
        }

        /// <summary>Decodes one encoded frame.</summary>
        /// <param name="frameBytes">Encoded packet data.</param>
        /// <returns>Decoded raw frame bytes.</returns>
        public byte[] DecodeFrames(byte[] frameBytes)
        {
            return _fFmpegDecoder.DecodeFrames(frameBytes);
        }

        /// <summary>Releases the decoder, if one was created (no-op otherwise).</summary>
        public void DisposeDecoder()
        {
            _fFmpegDecoder?.Dispose();
        }

        #endregion

        // Encoder instance; null until CreateEncoder is called.
        private FFmpegEncoder _fFmpegEncoder;
        // Decoder instance; null until CreateDecoder is called.
        private FFmpegDecoder _fFmpegDecoder;
    }
}

其它业务类:

using System;
using System.IO;
using System.Runtime.InteropServices;namespace FFmpegAnalyzer
{
    /// <summary>
    /// Locates the native FFmpeg libraries and adds them to the loader search path.
    /// </summary>
    internal class FFmpegBinariesHelper
    {
        private const string LD_LIBRARY_PATH = "LD_LIBRARY_PATH";

        /// <summary>
        /// Finds the FFmpeg binaries for the current platform/bitness and
        /// registers their directory with the native library loader.
        /// </summary>
        internal static void RegisterFFmpegBinaries()
        {
            switch (Environment.OSVersion.Platform)
            {
                case PlatformID.Win32NT:
                case PlatformID.Win32S:
                case PlatformID.Win32Windows:
                    // Walk up from the application base directory until a
                    // FFmpeg/bin/<arch> folder is found.
                    var current = AppDomain.CurrentDomain.BaseDirectory;
                    var probe = $"FFmpeg/bin/{(Environment.Is64BitProcess ? @"x64" : @"x86")}";
                    while (current != null)
                    {
                        var ffmpegDirectory = Path.Combine(current, probe);
                        if (Directory.Exists(ffmpegDirectory))
                        {
                            Console.WriteLine($"FFmpeg binaries found in: {ffmpegDirectory}");
                            RegisterLibrariesSearchPath(ffmpegDirectory);
                            return;
                        }
                        current = Directory.GetParent(current)?.FullName;
                    }
                    break;
                case PlatformID.Unix:
                case PlatformID.MacOSX:
                    var libraryPath = Environment.GetEnvironmentVariable(LD_LIBRARY_PATH);
                    RegisterLibrariesSearchPath(libraryPath);
                    break;
            }
        }

        /// <summary>
        /// Registers <paramref name="path"/> with the platform's native-library
        /// search mechanism (SetDllDirectory on Windows, LD_LIBRARY_PATH on Unix).
        /// </summary>
        private static void RegisterLibrariesSearchPath(string path)
        {
            if (string.IsNullOrWhiteSpace(path)) return;
            switch (Environment.OSVersion.Platform)
            {
                case PlatformID.Win32NT:
                case PlatformID.Win32S:
                case PlatformID.Win32Windows:
                    SetDllDirectory(path);
                    break;
                case PlatformID.Unix:
                case PlatformID.MacOSX:
                    string currentValue = Environment.GetEnvironmentVariable(LD_LIBRARY_PATH);
                    if (string.IsNullOrWhiteSpace(currentValue))
                    {
                        // BUG FIX: the original silently did nothing when
                        // LD_LIBRARY_PATH was unset or empty, so the path was
                        // never registered at all.
                        Environment.SetEnvironmentVariable(LD_LIBRARY_PATH, path);
                    }
                    else if (currentValue.Contains(path) == false)
                    {
                        string newValue = currentValue + Path.PathSeparator + path;
                        Environment.SetEnvironmentVariable(LD_LIBRARY_PATH, newValue);
                    }
                    break;
            }
        }

        [DllImport("kernel32", SetLastError = true)]
        private static extern bool SetDllDirectory(string lpPathName);
    }
}
using System;
using System.Drawing;
using System.Runtime.InteropServices;
using FFmpeg.AutoGen;namespace FFmpegAnalyzer
{
    /// <summary>
    /// Decodes compressed video packets (H.264 by default) into raw RGB24/BGRA frames.
    /// </summary>
    internal unsafe class FFmpegDecoder
    {
        /// <param name="decodedFrameSize">Size of one decoded output frame.</param>
        /// <param name="isRgb">true: RGB24 output (3 bytes/px); false: BGRA (4 bytes/px).</param>
        public FFmpegDecoder(Size decodedFrameSize, bool isRgb = true)
        {
            _decodedFrameSize = decodedFrameSize;
            _isRgb = isRgb;
        }

        /// <summary>
        /// Creates and opens the decoder plus the YUV420P-to-RGB conversion context.
        /// </summary>
        /// <param name="codecFormat">Codec of the incoming packets.</param>
        public void CreateDecoder(AVCodecID codecFormat)
        {
            var originPixelFormat = AVPixelFormat.AV_PIX_FMT_YUV420P;
            var destinationPixelFormat = _isRgb ? AVPixelFormat.AV_PIX_FMT_RGB24 : AVPixelFormat.AV_PIX_FMT_BGRA;
            // Find the decoder for the requested codec.
            _pDecodec = ffmpeg.avcodec_find_decoder(codecFormat);
            if (_pDecodec == null) throw new InvalidOperationException("Codec not found.");
            _pDecodecContext = ffmpeg.avcodec_alloc_context3(_pDecodec);
            _pDecodecContext->width = _decodedFrameSize.Width;
            _pDecodecContext->height = _decodedFrameSize.Height;
            _pDecodecContext->time_base = new AVRational { num = 1, den = 30 };
            _pDecodecContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
            _pDecodecContext->framerate = new AVRational { num = 30, den = 1 };
            _pDecodecContext->gop_size = 30;
            // Low-latency tuning: PSNR stats, fast decode flags, no B-frames.
            _pDecodecContext->flags |= ffmpeg.AV_CODEC_FLAG_PSNR;
            _pDecodecContext->flags2 |= ffmpeg.AV_CODEC_FLAG2_FAST;
            _pDecodecContext->max_b_frames = 0;
            ffmpeg.av_opt_set(_pDecodecContext->priv_data, "preset", "veryfast", 0);
            ffmpeg.av_opt_set(_pDecodecContext->priv_data, "tune", "zerolatency", 0);
            // BUG FIX: the original ignored the avcodec_open2 return code.
            if (ffmpeg.avcodec_open2(_pDecodecContext, _pDecodec, null) < 0)
                throw new InvalidOperationException("Could not open codec.");
            _pConvertContext = ffmpeg.sws_getContext(
                _decodedFrameSize.Width, _decodedFrameSize.Height, originPixelFormat,
                _decodedFrameSize.Width, _decodedFrameSize.Height, destinationPixelFormat,
                ffmpeg.SWS_FAST_BILINEAR, null, null, null);
            if (_pConvertContext == null)
                throw new ApplicationException("Could not initialize the conversion context.");
            // One unmanaged buffer big enough for a converted frame; _dstData /
            // _dstLineSize point its planes/strides at that buffer.
            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, _decodedFrameSize.Width, _decodedFrameSize.Height, 1);
            _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
            _dstData = new byte_ptrArray4();
            _dstLineSize = new int_array4();
            ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLineSize, (byte*)_convertedFrameBufferPtr, destinationPixelFormat, _decodedFrameSize.Width, _decodedFrameSize.Height, 1);
            _isCodecRunning = true;
        }

        /// <summary>
        /// Decodes one packet and returns the converted RGB24/BGRA frame bytes.
        /// </summary>
        /// <param name="frameBytes">One encoded packet.</param>
        /// <returns>Tightly packed pixel data for one frame.</returns>
        /// <exception cref="InvalidOperationException">Decoder not created or already disposed.</exception>
        public byte[] DecodeFrames(byte[] frameBytes)
        {
            if (!_isCodecRunning)
            {
                throw new InvalidOperationException("解码器未运行!");
            }
            var waitDecodePacket = ffmpeg.av_packet_alloc();
            var waitDecoderFrame = ffmpeg.av_frame_alloc();
            try
            {
                ffmpeg.av_frame_unref(waitDecoderFrame);
                fixed (byte* waitDecodeData = frameBytes)
                {
                    waitDecodePacket->data = waitDecodeData;
                    waitDecodePacket->size = frameBytes.Length;
                    try
                    {
                        int error;
                        do
                        {
                            ffmpeg.avcodec_send_packet(_pDecodecContext, waitDecodePacket);
                            error = ffmpeg.avcodec_receive_frame(_pDecodecContext, waitDecoderFrame);
                        } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN)); // EAGAIN: decoder needs more input
                    }
                    finally
                    {
                        ffmpeg.av_packet_unref(waitDecodePacket);
                    }
                    var decodeAfterFrame = ConvertToRgb(waitDecoderFrame);
                    // RGB24 is 3 bytes per pixel, BGRA is 4.
                    var length = _isRgb
                        ? decodeAfterFrame.height * decodeAfterFrame.width * 3
                        : decodeAfterFrame.height * decodeAfterFrame.width * 4;
                    byte[] buffer = new byte[length];
                    Marshal.Copy((IntPtr)decodeAfterFrame.data[0], buffer, 0, buffer.Length);
                    return buffer;
                }
            }
            finally
            {
                // BUG FIX: the packet/frame structs were leaked on every call in
                // the original; av_packet_unref alone does not release the structs
                // themselves.
                ffmpeg.av_frame_free(&waitDecoderFrame);
                ffmpeg.av_packet_free(&waitDecodePacket);
            }
        }

        /// <summary>Releases the decoder and its conversion buffer. Safe to call twice.</summary>
        public void Dispose()
        {
            // BUG FIX: guard against double dispose / dispose-before-create
            // (consistent with FFmpegEncoder.Dispose).
            if (!_isCodecRunning) return;
            _isCodecRunning = false;
            ffmpeg.avcodec_close(_pDecodecContext);
            ffmpeg.av_free(_pDecodecContext);
            Marshal.FreeHGlobal(_convertedFrameBufferPtr);
            ffmpeg.sws_freeContext(_pConvertContext);
        }

        /// <summary>
        /// Converts a decoded YUV420P frame to the destination pixel format.
        /// The returned frame's planes point at the internal conversion buffer,
        /// so the data is only valid until the next conversion.
        /// </summary>
        private AVFrame ConvertToRgb(AVFrame* waitDecoderFrame)
        {
            ffmpeg.sws_scale(_pConvertContext, waitDecoderFrame->data, waitDecoderFrame->linesize, 0, waitDecoderFrame->height, _dstData, _dstLineSize);
            var decodeAfterData = new byte_ptrArray8();
            decodeAfterData.UpdateFrom(_dstData);
            var lineSize = new int_array8();
            lineSize.UpdateFrom(_dstLineSize);
            ffmpeg.av_frame_unref(waitDecoderFrame);
            return new AVFrame
            {
                data = decodeAfterData,
                linesize = lineSize,
                width = _decodedFrameSize.Width,
                height = _decodedFrameSize.Height
            };
        }

        // Decoder
        private AVCodec* _pDecodec;
        private AVCodecContext* _pDecodecContext;
        // Conversion buffer
        private IntPtr _convertedFrameBufferPtr;
        private byte_ptrArray4 _dstData;
        private int_array4 _dstLineSize;
        // Format converter
        private SwsContext* _pConvertContext;
        private Size _decodedFrameSize;
        private readonly bool _isRgb;
        // True while the codec is open and usable
        private bool _isCodecRunning;
    }
}
using System;
using System.Runtime.InteropServices;
using System.Drawing;
using FFmpeg.AutoGen;namespace FFmpegAnalyzer
{
    /// <summary>
    /// Encodes raw RGB24/BGRA frames into compressed video packets (H.264 by default).
    /// </summary>
    internal unsafe class FFmpegEncoder
    {
        /// <param name="frameSize">Size of one raw input frame.</param>
        /// <param name="isRgb">true: RGB24 input (3 bytes/px); false: BGRA (4 bytes/px).</param>
        public FFmpegEncoder(Size frameSize, bool isRgb = true)
        {
            _frameSize = frameSize;
            _isRgb = isRgb;
            _rowPitch = isRgb ? _frameSize.Width * 3 : _frameSize.Width * 4;
        }

        /// <summary>
        /// Creates and opens the encoder plus the RGB-to-YUV420P conversion context.
        /// </summary>
        /// <param name="codecFormat">Codec to encode with.</param>
        public void CreateEncoder(AVCodecID codecFormat)
        {
            var originPixelFormat = _isRgb ? AVPixelFormat.AV_PIX_FMT_RGB24 : AVPixelFormat.AV_PIX_FMT_BGRA;
            var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_YUV420P;
            _pCodec = ffmpeg.avcodec_find_encoder(codecFormat);
            if (_pCodec == null)
                throw new InvalidOperationException("Codec not found.");
            _pCodecContext = ffmpeg.avcodec_alloc_context3(_pCodec);
            _pCodecContext->width = _frameSize.Width;
            _pCodecContext->height = _frameSize.Height;
            _pCodecContext->framerate = new AVRational { num = 30, den = 1 };
            _pCodecContext->time_base = new AVRational { num = 1, den = 30 };
            _pCodecContext->gop_size = 30;
            _pCodecContext->pix_fmt = destinationPixelFormat;
            // Low-latency tuning: PSNR stats, fast flags, no B-frames.
            _pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_PSNR;
            _pCodecContext->flags2 |= ffmpeg.AV_CODEC_FLAG2_FAST;
            _pCodecContext->max_b_frames = 0;
            ffmpeg.av_opt_set(_pCodecContext->priv_data, "preset", "veryfast", 0);
            ffmpeg.av_opt_set(_pCodecContext->priv_data, "tune", "zerolatency", 0);
            // BUG FIX: the original ignored the avcodec_open2 return code.
            if (ffmpeg.avcodec_open2(_pCodecContext, _pCodec, null) < 0)
                throw new InvalidOperationException("Could not open codec.");
            _pConvertContext = ffmpeg.sws_getContext(
                _frameSize.Width, _frameSize.Height, originPixelFormat,
                _frameSize.Width, _frameSize.Height, destinationPixelFormat,
                ffmpeg.SWS_FAST_BILINEAR, null, null, null);
            if (_pConvertContext == null)
                throw new ApplicationException("Could not initialize the conversion context.");
            // One unmanaged buffer big enough for a converted YUV frame.
            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, _frameSize.Width, _frameSize.Height, 1);
            _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
            _dstData = new byte_ptrArray4();
            _dstLineSize = new int_array4();
            ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLineSize, (byte*)_convertedFrameBufferPtr, destinationPixelFormat, _frameSize.Width, _frameSize.Height, 1);
            _isCodecRunning = true;
        }

        /// <summary>Releases the encoder and its conversion buffer. Safe to call twice.</summary>
        public void Dispose()
        {
            if (!_isCodecRunning) return;
            _isCodecRunning = false;
            ffmpeg.avcodec_close(_pCodecContext);
            ffmpeg.av_free(_pCodecContext);
            Marshal.FreeHGlobal(_convertedFrameBufferPtr);
            ffmpeg.sws_freeContext(_pConvertContext);
        }

        /// <summary>
        /// Encodes one raw frame and returns the encoded packet bytes.
        /// </summary>
        /// <param name="frameBytes">Raw frame, tightly packed, _rowPitch bytes per row.</param>
        /// <exception cref="InvalidOperationException">Encoder not created or already disposed.</exception>
        public byte[] EncodeFrames(byte[] frameBytes)
        {
            if (!_isCodecRunning)
            {
                throw new InvalidOperationException("编码器未运行!");
            }
            fixed (byte* pBitmapData = frameBytes)
            {
                // Wrap the managed buffer as a single-plane AVFrame for sws_scale.
                var waitToYuvFrame = new AVFrame
                {
                    data = new byte_ptrArray8 { [0] = pBitmapData },
                    linesize = new int_array8 { [0] = _rowPitch },
                    height = _frameSize.Height
                };
                var rgbToYuv = ConvertToYuv(waitToYuvFrame, _frameSize.Width, _frameSize.Height);
                byte[] buffer;
                var pPacket = ffmpeg.av_packet_alloc();
                try
                {
                    int error;
                    do
                    {
                        ffmpeg.avcodec_send_frame(_pCodecContext, &rgbToYuv);
                        error = ffmpeg.avcodec_receive_packet(_pCodecContext, pPacket);
                    } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN)); // EAGAIN: encoder needs more input
                    buffer = new byte[pPacket->size];
                    Marshal.Copy(new IntPtr(pPacket->data), buffer, 0, pPacket->size);
                }
                finally
                {
                    ffmpeg.av_frame_unref(&rgbToYuv);
                    // BUG FIX: the packet struct itself was leaked on every call in
                    // the original (only unref'd); av_packet_free also unrefs the
                    // payload before releasing the struct.
                    ffmpeg.av_packet_free(&pPacket);
                }
                return buffer;
            }
        }

        /// <summary>
        /// Converts a packed RGB/BGRA frame to YUV420P via the shared sws context.
        /// The returned frame's planes point at the internal conversion buffer,
        /// so the data is only valid until the next conversion.
        /// </summary>
        private AVFrame ConvertToYuv(AVFrame waitConvertYuvFrame, int width, int height)
        {
            ffmpeg.sws_scale(_pConvertContext, waitConvertYuvFrame.data, waitConvertYuvFrame.linesize, 0, waitConvertYuvFrame.height, _dstData, _dstLineSize);
            var data = new byte_ptrArray8();
            data.UpdateFrom(_dstData);
            var lineSize = new int_array8();
            lineSize.UpdateFrom(_dstLineSize);
            ffmpeg.av_frame_unref(&waitConvertYuvFrame);
            return new AVFrame
            {
                data = data,
                linesize = lineSize,
                width = width,
                height = height
            };
        }

        // Encoder
        private AVCodec* _pCodec;
        private AVCodecContext* _pCodecContext;
        // Conversion buffer
        private IntPtr _convertedFrameBufferPtr;
        private byte_ptrArray4 _dstData;
        private int_array4 _dstLineSize;
        // Format converter
        private SwsContext* _pConvertContext;
        private Size _frameSize;
        private readonly int _rowPitch;
        private readonly bool _isRgb;
        // True while the codec is open and usable
        private bool _isCodecRunning;
    }
}
using FFmpeg.AutoGen;
using System;
using System.Drawing;
using System.Runtime.InteropServices;
using System.Windows;namespace FFmpegAnalyzer
{
    /// <summary>
    /// Converts AVFrames from one size/pixel format to another using sws_scale.
    /// </summary>
    public sealed unsafe class VideoFrameConverter : IDisposable
    {
        private readonly IntPtr _convertedFrameBufferPtr;
        private readonly System.Drawing.Size _destinationSize;
        private readonly byte_ptrArray4 _dstData;
        private readonly int_array4 _dstLinesize;
        private readonly SwsContext* _pConvertContext;

        /// <summary>
        /// Sets up the scaling/conversion context and a destination buffer.
        /// </summary>
        /// <param name="sourceSize">Input frame dimensions.</param>
        /// <param name="sourcePixelFormat">Input pixel format.</param>
        /// <param name="destinationSize">Output frame dimensions.</param>
        /// <param name="destinationPixelFormat">Output pixel format.</param>
        public VideoFrameConverter(System.Drawing.Size sourceSize, AVPixelFormat sourcePixelFormat,
            System.Drawing.Size destinationSize, AVPixelFormat destinationPixelFormat)
        {
            _destinationSize = destinationSize;

            // sws_getContext allocates the SwsContext that sws_scale uses for all
            // scaling/conversion work. SWS_FAST_BILINEAR is the default/cheapest
            // filter; the trailing nulls are optional filter/algorithm parameters
            // (only used by e.g. SWS_BICUBIC / SWS_GAUSS / SWS_LANCZOS).
            _pConvertContext = ffmpeg.sws_getContext(
                sourceSize.Width, sourceSize.Height, sourcePixelFormat,
                destinationSize.Width, destinationSize.Height, destinationPixelFormat,
                ffmpeg.SWS_FAST_BILINEAR, null, null, null);
            if (_pConvertContext == null)
                throw new ApplicationException("Could not initialize the conversion context.");

            // Size of one destination frame, and one unmanaged buffer to hold it.
            var bufferSize = ffmpeg.av_image_get_buffer_size(
                destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);
            _convertedFrameBufferPtr = Marshal.AllocHGlobal(bufferSize);

            // Point the plane pointers / strides at the unmanaged buffer.
            _dstData = new byte_ptrArray4();
            _dstLinesize = new int_array4();
            ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLinesize,
                (byte*)_convertedFrameBufferPtr, destinationPixelFormat,
                destinationSize.Width, destinationSize.Height, 1);
        }

        /// <summary>Releases the unmanaged buffer and the conversion context.</summary>
        public void Dispose()
        {
            Marshal.FreeHGlobal(_convertedFrameBufferPtr);
            ffmpeg.sws_freeContext(_pConvertContext);
        }

        /// <summary>
        /// Converts one frame into the destination size/format. The returned
        /// frame's planes point at this converter's internal buffer, so the data
        /// is only valid until the next Convert call.
        /// </summary>
        public AVFrame Convert(AVFrame sourceFrame)
        {
            ffmpeg.sws_scale(_pConvertContext, sourceFrame.data, sourceFrame.linesize,
                0, sourceFrame.height, _dstData, _dstLinesize);

            var planes = new byte_ptrArray8();
            planes.UpdateFrom(_dstData);
            var strides = new int_array8();
            strides.UpdateFrom(_dstLinesize);

            return new AVFrame
            {
                data = planes,
                linesize = strides,
                width = _destinationSize.Width,
                height = _destinationSize.Height
            };
        }
    }
}
using System;
using System.Collections.Generic;
using System.Drawing;
using System.IO;
using System.Runtime.InteropServices;
using System.Windows;
using FFmpeg.AutoGen;namespace FFmpegAnalyzer
{
    /// <summary>
    /// Pull-based decoder for a video stream (file or URL), with optional
    /// hardware-accelerated decoding.
    /// </summary>
    public sealed unsafe class VideoStreamDecoder : IDisposable
    {
        private readonly AVCodecContext* _pCodecContext;
        private readonly AVFormatContext* _pFormatContext;
        private readonly int _streamIndex;
        // BUG FIX: these two field declarations were commented out in the
        // original although the constructor, Dispose and TryDecodeNextFrame all
        // use them — the class did not compile.
        private readonly AVFrame* _pFrame;
        private readonly AVFrame* _receivedFrame;
        private readonly AVPacket* _pPacket;

        /// <summary>
        /// Opens the stream, selects the best video stream and initializes the decoder.
        /// </summary>
        /// <param name="url">Video stream URL or file path.</param>
        /// <param name="HWDeviceType">Hardware decoder type (AV_HWDEVICE_TYPE_NONE = software decoding).</param>
        public VideoStreamDecoder(string url, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        {
            _pFormatContext = ffmpeg.avformat_alloc_context();
            // Destination frame for hardware-decoded output.
            _receivedFrame = ffmpeg.av_frame_alloc();
            var pFormatContext = _pFormatContext;
            // Open the input and probe stream information.
            ffmpeg.avformat_open_input(&pFormatContext, url, null, null);
            ffmpeg.avformat_find_stream_info(_pFormatContext, null);
            AVCodec* codec = null;
            // Select the best video stream; codec receives the matching decoder.
            _streamIndex = ffmpeg.av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0);
            // Allocate the codec context for that decoder (not yet initialized).
            _pCodecContext = ffmpeg.avcodec_alloc_context3(codec);
            if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
            {
                // Create the hardware device context for hardware decoding.
                ffmpeg.av_hwdevice_ctx_create(&_pCodecContext->hw_device_ctx, HWDeviceType, null, null, 0);
            }
            // Copy the stream's codec parameters into the context, then open it.
            ffmpeg.avcodec_parameters_to_context(_pCodecContext, _pFormatContext->streams[_streamIndex]->codecpar);
            ffmpeg.avcodec_open2(_pCodecContext, codec, null);
            CodecName = ffmpeg.avcodec_get_name(codec->id);
            FrameSize = new System.Drawing.Size(_pCodecContext->width, _pCodecContext->height);
            PixelFormat = _pCodecContext->pix_fmt;
            // AVPacket carries compressed data (typically one video frame per packet).
            _pPacket = ffmpeg.av_packet_alloc();
            // AVFrame carries decoded data; allocated with av_frame_alloc and
            // released with av_frame_free.
            _pFrame = ffmpeg.av_frame_alloc();
        }

        /// <summary>Name of the selected decoder.</summary>
        public string CodecName { get; }
        /// <summary>Frame dimensions reported by the codec context.</summary>
        public System.Drawing.Size FrameSize { get; }
        /// <summary>Pixel format reported by the codec context.</summary>
        public AVPixelFormat PixelFormat { get; }

        public void Dispose()
        {
            // BUG FIX: use av_frame_free / av_packet_free instead of av_free so
            // the frames'/packet's internal buffers are released too (av_free
            // only frees the struct). The original also never released
            // _receivedFrame. Locals are needed because the address of a
            // readonly field cannot be taken outside the constructor.
            var pFrame = _pFrame;
            ffmpeg.av_frame_free(&pFrame);
            var pReceivedFrame = _receivedFrame;
            ffmpeg.av_frame_free(&pReceivedFrame);
            var pPacket = _pPacket;
            ffmpeg.av_packet_free(&pPacket);
            ffmpeg.avcodec_close(_pCodecContext);
            var pFormatContext = _pFormatContext;
            ffmpeg.avformat_close_input(&pFormatContext);
        }

        /// <summary>
        /// Decodes the next video frame from the stream.
        /// </summary>
        /// <param name="frame">Receives the decoded frame (valid until the next call).</param>
        /// <returns>false at end of stream, true otherwise.</returns>
        public bool TryDecodeNextFrame(out AVFrame frame)
        {
            // Drop any references held from the previous call.
            ffmpeg.av_frame_unref(_pFrame);
            ffmpeg.av_frame_unref(_receivedFrame);
            int error;
            do
            {
                try
                {
                    // Read packets until one from the selected video stream appears,
                    // skipping audio/other streams.
                    do
                    {
                        error = ffmpeg.av_read_frame(_pFormatContext, _pPacket);
                        if (error == ffmpeg.AVERROR_EOF)
                        {
                            // End of stream.
                            frame = *_pFrame;
                            return false;
                        }
                    } while (_pPacket->stream_index != _streamIndex);
                    // Feed the compressed packet to the decoder.
                    ffmpeg.avcodec_send_packet(_pCodecContext, _pPacket);
                }
                finally
                {
                    ffmpeg.av_packet_unref(_pPacket);
                }
                // Pull the decoded frame back out of the decoder.
                error = ffmpeg.avcodec_receive_frame(_pCodecContext, _pFrame);
            } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN)); // EAGAIN: decoder needs more input
            if (_pCodecContext->hw_device_ctx != null)
            {
                // Hardware path: download the decoded frame into _receivedFrame.
                ffmpeg.av_hwframe_transfer_data(_receivedFrame, _pFrame, 0);
                frame = *_receivedFrame;
            }
            else
            {
                frame = *_pFrame;
            }
            return true;
        }

        /// <summary>
        /// Returns the container's metadata tags as a dictionary.
        /// </summary>
        public IReadOnlyDictionary<string, string> GetContextInfo()
        {
            AVDictionaryEntry* tag = null;
            var result = new Dictionary<string, string>();
            while ((tag = ffmpeg.av_dict_get(_pFormatContext->metadata, "", tag, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null)
            {
                var key = Marshal.PtrToStringAnsi((IntPtr)tag->key);
                var value = Marshal.PtrToStringAnsi((IntPtr)tag->value);
                result.Add(key, value);
            }
            return result;
        }
    }
}

需要将ffmpeg的类库复制到生成目录上(对应FFmpegBinariesHelper.RegisterFFmpegBinaries()中的生成路径)

 使用代码:

// One-time process-wide setup: locate native FFmpeg binaries and register codecs.
FFmpegWrapper.RegisterFFmpeg();
// One wrapper instance per direction: this one encodes 1920x1080 RGB frames...
_ffMpegWrapper = new FFmpegWrapper();
_ffMpegWrapper.CreateEncoder(new System.Drawing.Size(1920, 1080), true);_ffMpegWrapper1 = new FFmpegWrapper();
// ...and this one decodes back to RGB (true = RGB24, 3 bytes per pixel).
_ffMpegWrapper1.CreateDecoder(new System.Drawing.Size(1920, 1080), true);
// Data: one raw 1920x1080 frame — presumably a byte[] of packed RGB24; not shown here, confirm against caller.
var encodeFrames = _ffMpegWrapper.EncodeFrames(Data);
// Round-trip: decode the packet produced above back into raw frame bytes.
var decodeFrames = _ffMpegWrapper1.DecodeFrames(encodeFrames);

本文来自互联网用户投稿,该文观点仅代表作者本人,不代表本站立场。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如若转载,请注明出处:http://www.rhkb.cn/news/85587.html

如若内容造成侵权/违法违规/事实不符,请联系长河编程网进行投诉反馈email:809451989@qq.com,一经查实,立即删除!

相关文章

虚拟世界探索:科技之下的未来可能性

随着科技的飞速发展，人们对于虚拟世界的憧憬和探索也日益加深。虚拟世界，那是一个超越现实的概念，一个充满想象力和创造力的领域。然而，虚拟世界究竟有可能实现吗？这是一个引人深思的问题。 虚拟世界，首先让…

激光切割机的操作中蛙跳技术是什么意思

其实，蛙跳技术就是指在激光切割机运行的过程中，机器换位置的方式。打个比方，你刚刚在这儿把孔1切好了，接下来就得跑到那儿把孔2切了。 在这个过程中，激光切割机就像是一只青蛙，要从一个位置跳到另一个位置。…

机器学习笔记值优化算法(十四)梯度下降法在凸函数上的收敛性

机器学习笔记之优化算法——梯度下降法在凸函数上的收敛性 引言回顾&#xff1a;收敛速度&#xff1a;次线性收敛二次上界引理 梯度下降法在凸函数上的收敛性收敛性定理介绍证明过程 引言 本节将介绍梯度下降法在凸函数上的收敛性。 回顾&#xff1a; 收敛速度&#xff1a;次…

数据结构 | 二叉树的应用

目录 一、解析树 二、树的遍历 一、解析树 我们可以用解析树来表示现实世界中像句子或数学表达式这样的构造。 我们可以将((73)*(5-2))这样的数学表达式表示成解析树。这是完全括号表达式，乘法的优先级高于加法和减法，但因为有括号，所以在…

【Linux进阶之路】进程(上)

文章目录 前言一、操作系统加载过程二、进程1.基本概念2.基本信息①运行并观察进程②创建子进程③僵尸与孤儿进程&#xff08;父子进程衍生出来的问题&#xff09;1. 僵尸进程&#xff08;Zombie状态&#xff09;2. 孤儿进程 3.基本状态①操作系统的状态&#xff08;统一&#…

5.利用matlab完成 符号矩阵的转置和 符号方阵的幂运算(matlab程序)

1.简述 Matlab符号运算中的矩阵转置 转置向量或矩阵 B A. B transpose(A) 说明 B A. 返回 A 的非共轭转置&#xff0c;即每个元素的行和列索引都会互换。如果 A 包含复数元素&#xff0c;则 A. 不会影响虚部符号。例如&#xff0c;如果 A(3,2) 是 12i 且 B A.&#xff0…

【C++】红黑树模拟实现插入功能(包含旋转和变色)

红黑树模拟实现并封装为map和set 前言正式开始红黑树概念红黑树基本要求大致框架树节点树 调整红黑树使其平衡第一种&#xff1a;cur红&#xff0c;p红&#xff0c;g黑&#xff0c;u存在且为红第二种&#xff1a;cur红&#xff0c;p红&#xff0c;g黑&#xff0c;u不存在或为黑…

CentOS7安装Maven详细教程

&#x1f60a; 作者&#xff1a; Eric &#x1f496; 主页&#xff1a; https://blog.csdn.net/weixin_47316183?typeblog &#x1f389; 主题&#xff1a;CentOS7安装Maven详细教程 ⏱️ 创作时间&#xff1a; 2023年08月06日 第一步&#xff1a;上传或下载安装包&#x…

2021年12月 C/C++(一级)真题解析#中国电子学会#全国青少年软件编程等级考试

第1题&#xff1a;输出整数部分 输入一个双精度浮点数f&#xff0c; 输出其整数部分。 时间限制&#xff1a;1000 内存限制&#xff1a;65536 输入 一个双精度浮点数f(0 < f < 100000000)。 输出 一个整数&#xff0c;表示浮点数的整数部分。 样例输入 3.8889 样例输出 3…

opencv实战项目 手势识别-手势控制鼠标

手势识别系列文章目录 手势识别是一种人机交互技术&#xff0c;通过识别人的手势动作&#xff0c;从而实现对计算机、智能手机、智能电视等设备的操作和控制。 1. opencv实现手部追踪&#xff08;定位手部关键点&#xff09; 2.opencv实战项目 实现手势跟踪并返回位置信息&…

设计模式--策略模式

目录 一.场景 1.1场景 2.2 何时使用 2.3个人理解 二. 业务场景练习 2.1业务: 2.2具体实现 2.3思路 三.总结 3.1策略模式的特点&#xff1a; 3.2策略模式优点 3.3策略模式缺点 一.场景 1.1场景 许多相关的类仅仅是行为有异&#xff0c;也就是说业务代码需要根据场景不…

Linux 创建子进程

文章目录 前言一、进程&#xff0c;线程&#xff0c;程序 区分二、创建子进程三、创建多个进程1. 获取进程号2. 循环创建多个进程 四、进程工具。1. ps 查看当前进程.2. kill 进程终止. 总结 前言 在计算机科学中&#xff0c;进程&#xff08;Process&#xff09;、线程&#…

Leetcode-每日一题【剑指 Offer 19. 正则表达式匹配】

题目 请实现一个函数用来匹配包含. 和*的正则表达式。模式中的字符.表示任意一个字符&#xff0c;而*表示它前面的字符可以出现任意次&#xff08;含0次&#xff09;。在本题中&#xff0c;匹配是指字符串的所有字符匹配整个模式。例如&#xff0c;字符串"aaa"与模式…

uniapp-----封装接口

系列文章目录 uniapp-----封装接口 uniapp-----分包 文章目录 系列文章目录 uniapp-----封装接口 uniapp-----分包 文章目录 前言 一、技术 二、封装步骤 1.准备 ​编辑 2.代码填充 request.js&#xff1a; api.js&#xff1a; min.js 页面使用 总结 前言 uniapp的主包要求大…

视频添加字幕

1、依靠ffmpeg 命令 package zimu;import java.io.IOException;public class TestSrt {public static void main(String[] args) {String videoFile "/test/test1.mp4";String subtitleFile "/test/test1.SRT";String outputFile "/test/testout13…

记录一次使用python调用java代码

Python调用Java代码的主要原理是通过使用Java虚拟机&#xff08;JVM&#xff09;和相关的库/工具实现的。 在Python中&#xff0c;可以使用以下几种方式来调用Java代码&#xff1a; 使用subprocess模块&#xff1a;可以通过subprocess模块来启动一个子进程&#xff0c;并在子进…

Hadoop Hbase Hive 版本对照一览

这里写目录标题 一、Hadoop 与 Hbase 版本对照二、Hadoop 与 Hive 版本对照 官网内容记录&#xff0c;仅供参考 一、Hadoop 与 Hbase 版本对照 二、Hadoop 与 Hive 版本对照

怎样学会单片机

0、学单片机首先要明白&#xff0c;一个单片机啥也干不了&#xff0c;学单片机的目的是学习怎么用单片机驱动外部设备&#xff0c;比如数码管&#xff0c;电机&#xff0c;液晶屏等&#xff0c;这个需要外围电路的配合&#xff0c;所以学习单片机在这个层面上可以等同为学习单片…

Linux简介及基础操作

简介&#xff1a; 1、linux和windows都是操作系统&#xff0c;多任务&#xff0c;多用户&#xff0c;多线程… Linux免费使用&#xff0c;自由传播&#xff0c;开源 2、Linux 发行版&#xff08;都是基于linux内核穿的外套&#xff09; Ubuntu——嵌入式开发 fedora——早期嵌入…

Gradio:交互式Python数据应用程序的新前沿

一、说明 什么是Gradio以及如何使用Gradio在Python中创建DataApp或Web界面&#xff1f;使用 Gradio 将您的 Python 数据科学项目转换为交互式应用程序。 摄影&#xff1a;Elijah Merrell on Unsplash Gradio是一个Python库&#xff0c;允许我们快速为机器学习模型创建可定制的接…