C# — encoding and decoding byte[] buffers with FFmpeg.AutoGen. Reference: https://github.com/vanjoge/CSharpVideoDemo
Entry-point wrapper class:
using System;
using System.Drawing;
using System.Runtime.InteropServices;
using FFmpeg.AutoGen;
namespace FFmpegAnalyzer
{
public class FFmpegWrapper
{
/// <summary>
/// Default codec format
/// </summary>
public AVCodecID DefaultCodecFormat { get; set; } = AVCodecID.AV_CODEC_ID_H264;
/// <summary>
/// Register the FFmpeg binaries and initialize the library
/// </summary>
public static void RegisterFFmpeg()
{
FFmpegBinariesHelper.RegisterFFmpegBinaries();
// Register codecs and (de)muxers (deprecated no-ops since FFmpeg 4.0, still required on 3.x)
ffmpeg.av_register_all();
ffmpeg.avcodec_register_all();
ffmpeg.avformat_network_init();
}
/// <summary>
/// Register a log callback
/// </summary>
/// <exception cref="NotSupportedException">Log registration is not supported on .NET Framework</exception>
private unsafe void RegisterFFmpegLogger()
{
// Set the FFmpeg log level
ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE);
// Keep the delegate in a field so the GC cannot collect it while native code still holds the pointer
_logCallback = (p0, level, format, vl) =>
{
if (level > ffmpeg.av_log_get_level()) return;
var lineSize = 1024;
var lineBuffer = stackalloc byte[lineSize];
var printPrefix = 1;
ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
Console.Write(line);
};
ffmpeg.av_log_set_callback(_logCallback);
}
private static av_log_set_callback_callback _logCallback;
#region Encoder
/// <summary>
/// Create the encoder
/// </summary>
/// <param name="frameSize">Size of one raw frame before encoding</param>
/// <param name="isRgb">true = RGB24 input, false = BGRA</param>
public void CreateEncoder(Size frameSize, bool isRgb = true)
{
_fFmpegEncoder = new FFmpegEncoder(frameSize, isRgb);
_fFmpegEncoder.CreateEncoder(DefaultCodecFormat);
}
/// <summary>
/// Encode one frame
/// </summary>
/// <param name="frameBytes">Raw frame data to encode</param>
/// <returns>Encoded packet bytes</returns>
public byte[] EncodeFrames(byte[] frameBytes)
{
return _fFmpegEncoder.EncodeFrames(frameBytes);
}
/// <summary>
/// Dispose the encoder
/// </summary>
public void DisposeEncoder()
{
_fFmpegEncoder.Dispose();
}
#endregion
#region Decoder
/// <summary>
/// Create the decoder
/// </summary>
/// <param name="decodedFrameSize">Size of the decoded frame</param>
/// <param name="isRgb">true = RGB24 output, false = BGRA</param>
public void CreateDecoder(Size decodedFrameSize, bool isRgb = true)
{
_fFmpegDecoder = new FFmpegDecoder(decodedFrameSize, isRgb);
_fFmpegDecoder.CreateDecoder(DefaultCodecFormat);
}
/// <summary>
/// Decode one frame
/// </summary>
/// <param name="frameBytes">Encoded frame data to decode</param>
/// <returns>Decoded pixel data</returns>
public byte[] DecodeFrames(byte[] frameBytes)
{
return _fFmpegDecoder.DecodeFrames(frameBytes);
}
/// <summary>
/// Dispose the decoder
/// </summary>
public void DisposeDecoder()
{
_fFmpegDecoder.Dispose();
}
#endregion
/// <summary>Encoder</summary>
private FFmpegEncoder _fFmpegEncoder;
/// <summary>Decoder</summary>
private FFmpegDecoder _fFmpegDecoder;
}
}
Supporting classes:
using System;
using System.IO;
using System.Runtime.InteropServices;
namespace FFmpegAnalyzer
{
internal class FFmpegBinariesHelper
{
private const string LD_LIBRARY_PATH = "LD_LIBRARY_PATH";
internal static void RegisterFFmpegBinaries()
{
switch (Environment.OSVersion.Platform)
{
case PlatformID.Win32NT:
case PlatformID.Win32S:
case PlatformID.Win32Windows:
var current = AppDomain.CurrentDomain.BaseDirectory;
var probe = $"FFmpeg/bin/{(Environment.Is64BitProcess ? @"x64" : @"x86")}";
while (current != null)
{
var ffmpegDirectory = Path.Combine(current, probe);
if (Directory.Exists(ffmpegDirectory))
{
Console.WriteLine($"FFmpeg binaries found in: {ffmpegDirectory}");
RegisterLibrariesSearchPath(ffmpegDirectory);
return;
}
current = Directory.GetParent(current)?.FullName;
}
break;
case PlatformID.Unix:
case PlatformID.MacOSX:
var libraryPath = Environment.GetEnvironmentVariable(LD_LIBRARY_PATH);
RegisterLibrariesSearchPath(libraryPath);
break;
}
}
private static void RegisterLibrariesSearchPath(string path)
{
switch (Environment.OSVersion.Platform)
{
case PlatformID.Win32NT:
case PlatformID.Win32S:
case PlatformID.Win32Windows:
SetDllDirectory(path);
break;
case PlatformID.Unix:
case PlatformID.MacOSX:
// Note: the dynamic loader reads LD_LIBRARY_PATH at process start, so changing it here only
// helps components that re-read the environment; exporting it before launch is more reliable.
string currentValue = Environment.GetEnvironmentVariable(LD_LIBRARY_PATH);
if (string.IsNullOrWhiteSpace(currentValue))
{
Environment.SetEnvironmentVariable(LD_LIBRARY_PATH, path);
}
else if (!currentValue.Contains(path))
{
string newValue = currentValue + Path.PathSeparator + path;
Environment.SetEnvironmentVariable(LD_LIBRARY_PATH, newValue);
}
break;
}
}
[DllImport("kernel32", SetLastError = true)]
private static extern bool SetDllDirectory(string lpPathName);
}
}
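Note: recent FFmpeg.AutoGen releases can skip the SetDllDirectory / LD_LIBRARY_PATH juggling above entirely. A minimal alternative sketch, assuming a binding version that exposes the static ffmpeg.RootPath property and the same FFmpeg/bin/<arch> layout:
using System;
using System.IO;
using FFmpeg.AutoGen;
internal static class FFmpegRootPathSetup
{
internal static void Register()
{
// Point the bindings directly at the folder holding avcodec-*.dll / libavcodec.so etc.
ffmpeg.RootPath = Path.Combine(
AppDomain.CurrentDomain.BaseDirectory,
"FFmpeg", "bin",
Environment.Is64BitProcess ? "x64" : "x86");
}
}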
using System;
using System.Drawing;
using System.Runtime.InteropServices;
using FFmpeg.AutoGen;
namespace FFmpegAnalyzer
{
/// <summary>
/// Decoder
/// </summary>
internal unsafe class FFmpegDecoder
{
/// <param name="decodedFrameSize">Size of the decoded frame</param>
/// <param name="isRgb">true = RGB24 output, false = BGRA</param>
public FFmpegDecoder(Size decodedFrameSize, bool isRgb = true)
{
_decodedFrameSize = decodedFrameSize;
_isRgb = isRgb;
}
/// <summary>
/// Create the decoder
/// </summary>
/// <param name="codecFormat">Codec to decode</param>
public void CreateDecoder(AVCodecID codecFormat)
{
var originPixelFormat = AVPixelFormat.AV_PIX_FMT_YUV420P;
var destinationPixelFormat = _isRgb ? AVPixelFormat.AV_PIX_FMT_RGB24 : AVPixelFormat.AV_PIX_FMT_BGRA;
// Find the decoder
_pDecodec = ffmpeg.avcodec_find_decoder(codecFormat);
if (_pDecodec == null) throw new InvalidOperationException("Codec not found.");
_pDecodecContext = ffmpeg.avcodec_alloc_context3(_pDecodec);
_pDecodecContext->width = _decodedFrameSize.Width;
_pDecodecContext->height = _decodedFrameSize.Height;
_pDecodecContext->time_base = new AVRational { num = 1, den = 30 };
_pDecodecContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
_pDecodecContext->framerate = new AVRational { num = 30, den = 1 };
_pDecodecContext->gop_size = 30;
// Low-latency flags; AV_CODEC_FLAG2_FAST allows non-spec-compliant speedups.
// (Several of these settings, like preset/tune, are encoder-side options that decoders ignore.)
_pDecodecContext->flags |= ffmpeg.AV_CODEC_FLAG_PSNR;
_pDecodecContext->flags2 |= ffmpeg.AV_CODEC_FLAG2_FAST;
_pDecodecContext->max_b_frames = 0;
ffmpeg.av_opt_set(_pDecodecContext->priv_data, "preset", "veryfast", 0);
ffmpeg.av_opt_set(_pDecodecContext->priv_data, "tune", "zerolatency", 0);
// Open the decoder
if (ffmpeg.avcodec_open2(_pDecodecContext, _pDecodec, null) < 0)
throw new InvalidOperationException("Could not open codec.");
_pConvertContext = ffmpeg.sws_getContext(
_decodedFrameSize.Width,
_decodedFrameSize.Height,
originPixelFormat,
_decodedFrameSize.Width,
_decodedFrameSize.Height,
destinationPixelFormat,
ffmpeg.SWS_FAST_BILINEAR,
null, null, null);
if (_pConvertContext == null)
throw new ApplicationException("Could not initialize the conversion context.");
var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, _decodedFrameSize.Width, _decodedFrameSize.Height, 1);
_convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
_dstData = new byte_ptrArray4();
_dstLineSize = new int_array4();
ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLineSize, (byte*)_convertedFrameBufferPtr, destinationPixelFormat,
_decodedFrameSize.Width, _decodedFrameSize.Height, 1);
_isCodecRunning = true;
}
/// <summary>
/// Decode one frame
/// </summary>
/// <param name="frameBytes">Encoded frame data</param>
/// <returns>Decoded pixel data</returns>
public byte[] DecodeFrames(byte[] frameBytes)
{
if (!_isCodecRunning)
{
throw new InvalidOperationException("The decoder is not running.");
}
var waitDecodePacket = ffmpeg.av_packet_alloc();
var waitDecoderFrame = ffmpeg.av_frame_alloc();
fixed (byte* waitDecodeData = frameBytes)
{
waitDecodePacket->data = waitDecodeData;
waitDecodePacket->size = frameBytes.Length;
try
{
int error;
do
{
ffmpeg.avcodec_send_packet(_pDecodecContext, waitDecodePacket);
error = ffmpeg.avcodec_receive_frame(_pDecodecContext, waitDecoderFrame);
} while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));
var decodeAfterFrame = ConvertToRgb(waitDecoderFrame);
var length = _isRgb
? decodeAfterFrame.height * decodeAfterFrame.width * 3
: decodeAfterFrame.height * decodeAfterFrame.width * 4;
byte[] buffer = new byte[length];
Marshal.Copy((IntPtr)decodeAfterFrame.data[0], buffer, 0, buffer.Length);
return buffer;
}
finally
{
// Free the packet and frame structs as well; unref alone leaks them
ffmpeg.av_packet_unref(waitDecodePacket);
ffmpeg.av_packet_free(&waitDecodePacket);
ffmpeg.av_frame_free(&waitDecoderFrame);
}
}
}
/// <summary>
/// Release resources
/// </summary>
public void Dispose()
{
if (!_isCodecRunning) return;
_isCodecRunning = false;
// Release the decoder
ffmpeg.avcodec_close(_pDecodecContext);
ffmpeg.av_free(_pDecodecContext);
// Release the converter
Marshal.FreeHGlobal(_convertedFrameBufferPtr);
ffmpeg.sws_freeContext(_pConvertContext);
}
/// <summary>
/// Convert a decoded frame to RGB24/BGRA
/// </summary>
/// <param name="waitDecoderFrame">Decoded frame</param>
/// <returns>Frame whose data points into the shared conversion buffer</returns>
private AVFrame ConvertToRgb(AVFrame* waitDecoderFrame)
{
ffmpeg.sws_scale(_pConvertContext, waitDecoderFrame->data, waitDecoderFrame->linesize, 0, waitDecoderFrame->height, _dstData, _dstLineSize);
var decodeAfterData = new byte_ptrArray8();
decodeAfterData.UpdateFrom(_dstData);
var lineSize = new int_array8();
lineSize.UpdateFrom(_dstLineSize);
ffmpeg.av_frame_unref(waitDecoderFrame);
return new AVFrame
{
data = decodeAfterData,
linesize = lineSize,
width = _decodedFrameSize.Width,
height = _decodedFrameSize.Height
};
}
// Decoder
private AVCodec* _pDecodec;
private AVCodecContext* _pDecodecContext;
// Conversion buffer
private IntPtr _convertedFrameBufferPtr;
private byte_ptrArray4 _dstData;
private int_array4 _dstLineSize;
// Pixel-format converter
private SwsContext* _pConvertContext;
private Size _decodedFrameSize;
private readonly bool _isRgb;
// Whether the decoder is running
private bool _isCodecRunning;
}
}
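Most FFmpeg calls in these listings ignore the negative error codes they can return, which makes failures silent. When debugging, a small helper that turns a return code into readable text is useful. A sketch built on av_strerror (the names FFmpegError, AvStrError and ThrowIfError are ours, not part of the original sample):
using System;
using System.Runtime.InteropServices;
using FFmpeg.AutoGen;
internal static unsafe class FFmpegError
{
// Renders an FFmpeg error code (a negative int) as a readable message.
public static string AvStrError(int error)
{
const int bufferSize = 1024;
var buffer = stackalloc byte[bufferSize];
ffmpeg.av_strerror(error, buffer, (ulong)bufferSize);
return Marshal.PtrToStringAnsi((IntPtr)buffer);
}
// Turns a failed call into an exception, e.g. ffmpeg.avcodec_open2(...).ThrowIfError();
public static int ThrowIfError(this int error)
{
if (error < 0) throw new ApplicationException(AvStrError(error));
return error;
}
}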
using System;
using System.Runtime.InteropServices;
using System.Drawing;
using FFmpeg.AutoGen;
namespace FFmpegAnalyzer
{
/// <summary>
/// Encoder
/// </summary>
internal unsafe class FFmpegEncoder
{
/// <param name="frameSize">Size of one raw frame before encoding</param>
/// <param name="isRgb">true = RGB24 input, false = BGRA</param>
public FFmpegEncoder(Size frameSize, bool isRgb = true)
{
_frameSize = frameSize;
_isRgb = isRgb;
_rowPitch = isRgb ? _frameSize.Width * 3 : _frameSize.Width * 4;
}
/// <summary>
/// Create the encoder
/// </summary>
public void CreateEncoder(AVCodecID codecFormat)
{
var originPixelFormat = _isRgb ? AVPixelFormat.AV_PIX_FMT_RGB24 : AVPixelFormat.AV_PIX_FMT_BGRA;
var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_YUV420P;
_pCodec = ffmpeg.avcodec_find_encoder(codecFormat);
if (_pCodec == null)
throw new InvalidOperationException("Codec not found.");
_pCodecContext = ffmpeg.avcodec_alloc_context3(_pCodec);
_pCodecContext->width = _frameSize.Width;
_pCodecContext->height = _frameSize.Height;
_pCodecContext->framerate = new AVRational { num = 30, den = 1 };
_pCodecContext->time_base = new AVRational { num = 1, den = 30 };
_pCodecContext->gop_size = 30;
_pCodecContext->pix_fmt = destinationPixelFormat;
// Low-latency settings: fast flags, no B-frames, x264 veryfast/zerolatency
_pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_PSNR;
_pCodecContext->flags2 |= ffmpeg.AV_CODEC_FLAG2_FAST;
_pCodecContext->max_b_frames = 0;
ffmpeg.av_opt_set(_pCodecContext->priv_data, "preset", "veryfast", 0);
ffmpeg.av_opt_set(_pCodecContext->priv_data, "tune", "zerolatency", 0);
// Open the encoder
if (ffmpeg.avcodec_open2(_pCodecContext, _pCodec, null) < 0)
throw new InvalidOperationException("Could not open codec.");
_pConvertContext = ffmpeg.sws_getContext(_frameSize.Width, _frameSize.Height, originPixelFormat, _frameSize.Width, _frameSize.Height, destinationPixelFormat,
ffmpeg.SWS_FAST_BILINEAR, null, null, null);
if (_pConvertContext == null)
throw new ApplicationException("Could not initialize the conversion context.");
var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, _frameSize.Width, _frameSize.Height, 1);
_convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
_dstData = new byte_ptrArray4();
_dstLineSize = new int_array4();
ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLineSize, (byte*)_convertedFrameBufferPtr, destinationPixelFormat, _frameSize.Width, _frameSize.Height, 1);
_isCodecRunning = true;
}
/// <summary>
/// Release resources
/// </summary>
public void Dispose()
{
if (!_isCodecRunning) return;
_isCodecRunning = false;
// Release the encoder
ffmpeg.avcodec_close(_pCodecContext);
ffmpeg.av_free(_pCodecContext);
// Release the converter
Marshal.FreeHGlobal(_convertedFrameBufferPtr);
ffmpeg.sws_freeContext(_pConvertContext);
}
/// <summary>
/// Encode one frame
/// </summary>
/// <param name="frameBytes">Raw frame data</param>
/// <returns>Encoded packet bytes</returns>
public byte[] EncodeFrames(byte[] frameBytes)
{
if (!_isCodecRunning)
{
throw new InvalidOperationException("The encoder is not running.");
}
fixed (byte* pBitmapData = frameBytes)
{
var waitToYuvFrame = new AVFrame
{
data = new byte_ptrArray8 { [0] = pBitmapData },
linesize = new int_array8 { [0] = _rowPitch },
height = _frameSize.Height
};
var rgbToYuv = ConvertToYuv(waitToYuvFrame, _frameSize.Width, _frameSize.Height);
byte[] buffer;
var pPacket = ffmpeg.av_packet_alloc();
try
{
int error;
do
{
ffmpeg.avcodec_send_frame(_pCodecContext, &rgbToYuv);
error = ffmpeg.avcodec_receive_packet(_pCodecContext, pPacket);
} while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));
buffer = new byte[pPacket->size];
Marshal.Copy(new IntPtr(pPacket->data), buffer, 0, pPacket->size);
}
finally
{
ffmpeg.av_frame_unref(&rgbToYuv);
// Free the packet struct as well; unref alone leaks it
ffmpeg.av_packet_unref(pPacket);
ffmpeg.av_packet_free(&pPacket);
}
return buffer;
}
}
/// <summary>
/// Convert a raw RGB/BGRA frame to YUV420P
/// </summary>
/// <param name="waitConvertYuvFrame">Source frame</param>
/// <param name="width">Frame width</param>
/// <param name="height">Frame height</param>
/// <returns>Frame whose data points into the shared conversion buffer</returns>
private AVFrame ConvertToYuv(AVFrame waitConvertYuvFrame, int width, int height)
{
ffmpeg.sws_scale(_pConvertContext, waitConvertYuvFrame.data, waitConvertYuvFrame.linesize, 0, waitConvertYuvFrame.height, _dstData, _dstLineSize);
var data = new byte_ptrArray8();
data.UpdateFrom(_dstData);
var lineSize = new int_array8();
lineSize.UpdateFrom(_dstLineSize);
ffmpeg.av_frame_unref(&waitConvertYuvFrame);
return new AVFrame
{
data = data,
linesize = lineSize,
width = width,
height = height
};
}
// Encoder
private AVCodec* _pCodec;
private AVCodecContext* _pCodecContext;
// Conversion buffer
private IntPtr _convertedFrameBufferPtr;
private byte_ptrArray4 _dstData;
private int_array4 _dstLineSize;
// Pixel-format converter
private SwsContext* _pConvertContext;
private Size _frameSize;
private readonly int _rowPitch;
private readonly bool _isRgb;
// Whether the encoder is running
private bool _isCodecRunning;
}
}
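The encoder expects tightly packed input: width * 3 bytes per row for RGB24 or width * 4 for BGRA (see _rowPitch above), with no stride padding. A small sketch that builds a solid-color RGB24 test frame of the right size (TestFrameFactory is a hypothetical helper, not part of the original sample):
using System.Drawing;
internal static class TestFrameFactory
{
// Returns a tightly packed RGB24 frame (width * height * 3 bytes) filled with one color.
public static byte[] CreateRgb24Frame(Size size, byte r, byte g, byte b)
{
var buffer = new byte[size.Width * size.Height * 3];
for (var i = 0; i < buffer.Length; i += 3)
{
buffer[i] = r;
buffer[i + 1] = g;
buffer[i + 2] = b;
}
return buffer;
}
}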
using FFmpeg.AutoGen;
using System;
using System.Drawing;
using System.Runtime.InteropServices;
namespace FFmpegAnalyzer
{
public sealed unsafe class VideoFrameConverter : IDisposable
{
private readonly IntPtr _convertedFrameBufferPtr;
private readonly System.Drawing.Size _destinationSize;
private readonly byte_ptrArray4 _dstData;
private readonly int_array4 _dstLinesize;
private readonly SwsContext* _pConvertContext;
/// <summary>
/// Frame pixel-format/size converter
/// </summary>
/// <param name="sourceSize">Source frame size</param>
/// <param name="sourcePixelFormat">Source pixel format</param>
/// <param name="destinationSize">Destination frame size</param>
/// <param name="destinationPixelFormat">Destination pixel format</param>
public VideoFrameConverter(System.Drawing.Size sourceSize, AVPixelFormat sourcePixelFormat,
System.Drawing.Size destinationSize, AVPixelFormat destinationPixelFormat)
{
_destinationSize = destinationSize;
// Allocate an SwsContext; all scaling/conversion work happens through it via sws_scale()
_pConvertContext = ffmpeg.sws_getContext(sourceSize.Width, sourceSize.Height, sourcePixelFormat,
destinationSize.Width,
destinationSize.Height,
destinationPixelFormat,
ffmpeg.SWS_FAST_BILINEAR, // default algorithm; others (SWS_BICUBIC, SWS_GAUSS, SWS_LANCZOS, ...) are available
null,
null,
null); // extra parameters for the algorithm chosen in flags; unused here
if (_pConvertContext == null) throw new ApplicationException("Could not initialize the conversion context.");
// Query the buffer size required for one destination frame
var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat,
destinationSize.Width, destinationSize.Height,
1);
// Allocate unmanaged memory for the converted frame
_convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
// Plane pointers / line sizes for the converted frame
_dstData = new byte_ptrArray4();
_dstLinesize = new int_array4();
// Point the entries of _dstData at the planes inside _convertedFrameBufferPtr
ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLinesize,
(byte*)_convertedFrameBufferPtr,
destinationPixelFormat,
destinationSize.Width, destinationSize.Height,
1);
}
public void Dispose()
{
Marshal.FreeHGlobal(_convertedFrameBufferPtr);
ffmpeg.sws_freeContext(_pConvertContext);
}
public AVFrame Convert(AVFrame sourceFrame)
{
// Convert pixel format / scale
ffmpeg.sws_scale(_pConvertContext,
sourceFrame.data,
sourceFrame.linesize,
0, sourceFrame.height,
_dstData, _dstLinesize);
var data = new byte_ptrArray8();
data.UpdateFrom(_dstData);
var linesize = new int_array8();
linesize.UpdateFrom(_dstLinesize);
return new AVFrame
{
data = data,
linesize = linesize,
width = _destinationSize.Width,
height = _destinationSize.Height
};
}
}
}
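Once Convert has produced a BGRA frame, wrapping it in a System.Drawing.Bitmap is straightforward because the converted buffer is contiguous. A sketch (assumes the frame came from a converter targeting AV_PIX_FMT_BGRA; the Bitmap shares the converter's buffer, so it is only valid while the converter is alive):
using System;
using System.Drawing;
using System.Drawing.Imaging;
using FFmpeg.AutoGen;
internal static unsafe class FrameBitmap
{
// Wraps a converted BGRA frame in a Bitmap without copying the pixel data.
public static Bitmap ToBitmap(AVFrame bgraFrame)
{
return new Bitmap(
bgraFrame.width,
bgraFrame.height,
bgraFrame.linesize[0], // stride in bytes
PixelFormat.Format32bppArgb, // little-endian 32bpp ARGB matches BGRA byte order
(IntPtr)bgraFrame.data[0]);
}
}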
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Runtime.InteropServices;
using FFmpeg.AutoGen;
namespace FFmpegAnalyzer
{
public sealed unsafe class VideoStreamDecoder : IDisposable
{
private readonly AVCodecContext* _pCodecContext;
private readonly AVFormatContext* _pFormatContext;
private readonly int _streamIndex;
// Frame as it comes out of the decoder
private readonly AVFrame* _pFrame;
// Destination frame for hardware-to-system-memory transfers
private readonly AVFrame* _receivedFrame;
private readonly AVPacket* _pPacket;
/// <summary>
/// Video stream decoder
/// </summary>
/// <param name="url">Video stream URL</param>
/// <param name="HWDeviceType">Hardware decoder type (default AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)</param>
public VideoStreamDecoder(string url, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
{
// Allocate an AVFormatContext
_pFormatContext = ffmpeg.avformat_alloc_context();
// Allocate an AVFrame
_receivedFrame = ffmpeg.av_frame_alloc();
var pFormatContext = _pFormatContext;
// Open the source stream
ffmpeg.avformat_open_input(&pFormatContext, url, null, null);
// Read stream information
ffmpeg.avformat_find_stream_info(_pFormatContext, null);
AVCodec* codec = null;
// Find the best video stream in the source; its decoder is returned through codec
_streamIndex = ffmpeg.av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0);
// Allocate an AVCodecContext for that decoder; it is not initialized yet
_pCodecContext = ffmpeg.avcodec_alloc_context3(codec);
// Hardware decoding, if requested
if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
{
// Create an AVHWDeviceContext for the device type and store it in _pCodecContext->hw_device_ctx
ffmpeg.av_hwdevice_ctx_create(&_pCodecContext->hw_device_ctx, HWDeviceType, null, null, 0);
}
// Copy the selected stream's codec parameters into the codec context
ffmpeg.avcodec_parameters_to_context(_pCodecContext, _pFormatContext->streams[_streamIndex]->codecpar);
// Initialize the codec context with the decoder (pairs with avcodec_alloc_context3 above)
ffmpeg.avcodec_open2(_pCodecContext, codec, null);
CodecName = ffmpeg.avcodec_get_name(codec->id);
FrameSize = new System.Drawing.Size(_pCodecContext->width, _pCodecContext->height);
PixelFormat = _pCodecContext->pix_fmt;
// Allocate an AVPacket
/* An AVPacket holds compressed data: audio, video, or subtitles.
It typically stores compressed data right after demuxing, before it is fed to the decoder,
or it is produced by an encoder and handed to the muxer.
For video, one AVPacket usually holds one compressed frame; for audio it may hold several. */
_pPacket = ffmpeg.av_packet_alloc();
// Allocate an AVFrame
/* An AVFrame holds decoded (raw) audio or video data.
It must be allocated with av_frame_alloc and released with av_frame_free. */
_pFrame = ffmpeg.av_frame_alloc();
}
public string CodecName { get; }
public System.Drawing.Size FrameSize { get; }
public AVPixelFormat PixelFormat { get; }
public void Dispose()
{
// av_frame_free / av_packet_free are the proper counterparts of av_frame_alloc / av_packet_alloc
var pFrame = _pFrame;
ffmpeg.av_frame_free(&pFrame);
var receivedFrame = _receivedFrame;
ffmpeg.av_frame_free(&receivedFrame);
var pPacket = _pPacket;
ffmpeg.av_packet_free(&pPacket);
ffmpeg.avcodec_close(_pCodecContext);
var pFormatContext = _pFormatContext;
ffmpeg.avformat_close_input(&pFormatContext);
}
/// <summary>
/// Decode the next frame
/// </summary>
/// <param name="frame">Receives the decoded frame</param>
/// <returns>false at end of stream</returns>
public bool TryDecodeNextFrame(out AVFrame frame)
{
// Unref the frames so they no longer hold on to any buffers
ffmpeg.av_frame_unref(_pFrame);
ffmpeg.av_frame_unref(_receivedFrame);
int error;
do
{
try
{
#region Read packets, skipping other streams
do
{
// Read the next packet from the container into _pPacket
error = ffmpeg.av_read_frame(_pFormatContext, _pPacket);
if (error == ffmpeg.AVERROR_EOF) // end of stream reached
{
frame = *_pFrame;
return false;
}
} while (_pPacket->stream_index != _streamIndex); // skip packets belonging to streams other than the one selected in the constructor
#endregion
// Feed the compressed packet (_pPacket) to the decoder (_pCodecContext)
ffmpeg.avcodec_send_packet(_pCodecContext, _pPacket);
}
finally
{
// Drop our reference to the packet's data
ffmpeg.av_packet_unref(_pPacket);
}
// Pull the next decoded frame out of the decoder into _pFrame
error = ffmpeg.avcodec_receive_frame(_pCodecContext, _pFrame);
} while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN)); // EAGAIN: the decoder needs more input; try again
if (_pCodecContext->hw_device_ctx != null) // hardware decoding configured
{
// Transfer the frame from GPU memory into _receivedFrame in system memory
ffmpeg.av_hwframe_transfer_data(_receivedFrame, _pFrame, 0);
frame = *_receivedFrame;
}
else
{
frame = *_pFrame;
}
return true;
}
/// <summary>
/// Get the media's metadata tags
/// </summary>
/// <returns>Tag dictionary</returns>
public IReadOnlyDictionary<string, string> GetContextInfo()
{
AVDictionaryEntry* tag = null;
var result = new Dictionary<string, string>();
while ((tag = ffmpeg.av_dict_get(_pFormatContext->metadata, "", tag, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null)
{
var key = Marshal.PtrToStringAnsi((IntPtr)tag->key);
var value = Marshal.PtrToStringAnsi((IntPtr)tag->value);
result.Add(key, value);
}
return result;
}
}
}
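The two classes above combine into a simple decode loop: open a file or network stream, then pull and convert frames until EOF. A sketch, assuming software decoding (with a hardware device type, decoder.PixelFormat would be the GPU format instead) and a placeholder test.mp4 path:
FFmpegWrapper.RegisterFFmpeg();
using (var decoder = new VideoStreamDecoder("test.mp4")) // or an rtsp:// / http:// URL
using (var converter = new VideoFrameConverter(
decoder.FrameSize, decoder.PixelFormat,
decoder.FrameSize, AVPixelFormat.AV_PIX_FMT_BGRA))
{
Console.WriteLine($"codec: {decoder.CodecName}, size: {decoder.FrameSize}");
while (decoder.TryDecodeNextFrame(out var frame))
{
var bgraFrame = converter.Convert(frame);
// hand bgraFrame.data[0] / bgraFrame.linesize[0] to your renderer here
}
}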
The FFmpeg native libraries must be copied into the build output directory, matching the probe path used by FFmpegBinariesHelper.RegisterFFmpegBinaries().
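On Windows the probe path is resolved relative to the build output, so a quick startup sanity check can save debugging time. A sketch mirroring (a simplified version of) the probe logic in RegisterFFmpegBinaries:
var probe = System.IO.Path.Combine(
AppDomain.CurrentDomain.BaseDirectory,
"FFmpeg", "bin",
Environment.Is64BitProcess ? "x64" : "x86");
Console.WriteLine(System.IO.Directory.Exists(probe)
? $"FFmpeg binaries found: {probe}"
: $"FFmpeg binaries missing, expected at: {probe}");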
Usage:
FFmpegWrapper.RegisterFFmpeg();
_ffMpegWrapper = new FFmpegWrapper();
_ffMpegWrapper.CreateEncoder(new System.Drawing.Size(1920, 1080), true);
_ffMpegWrapper1 = new FFmpegWrapper();
_ffMpegWrapper1.CreateDecoder(new System.Drawing.Size(1920, 1080), true);
var encodeFrames = _ffMpegWrapper.EncodeFrames(Data);
var decodeFrames = _ffMpegWrapper1.DecodeFrames(encodeFrames);
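Putting it together with the CreateRgb24Frame helper sketched earlier (a hypothetical helper, not part of the original sample), a full in-memory round trip looks like this; Data above plays the role of rawFrame:
FFmpegWrapper.RegisterFFmpeg();
var size = new System.Drawing.Size(1920, 1080);
var encoder = new FFmpegWrapper();
encoder.CreateEncoder(size, true);
var decoder = new FFmpegWrapper();
decoder.CreateDecoder(size, true);
// 1920 * 1080 * 3 bytes of tightly packed RGB24
byte[] rawFrame = TestFrameFactory.CreateRgb24Frame(size, 255, 0, 0);
byte[] h264 = encoder.EncodeFrames(rawFrame);
byte[] roundTripped = decoder.DecodeFrames(h264);
Console.WriteLine($"raw: {rawFrame.Length} B, encoded: {h264.Length} B, decoded: {roundTripped.Length} B");
encoder.DisposeEncoder();
decoder.DisposeDecoder();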