用C#基于FFmpeg.AutoGen+NAudio实现视频播放器(带音频)
准备工作
首先在工程项目中安装必要的 NuGet 包:
1. FFmpeg.AutoGen
2. NAudio
如果 FFmpeg.AutoGen 安装后没有自动附带所需的 FFmpeg 动态库文件,请下载文尾的程序包并从中拷贝 FFmpeg 文件夹。
完整代码实现
using FFmpeg.AutoGen;
using NAudio.Wave;
using System;
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
using System.Threading;
using System.Windows.Forms;
namespace videoplayer
{
public unsafe class videoplay : IDisposable
{
    /// <summary>Callback that receives each decoded video frame; called with null once playback ends.</summary>
    public delegate void ShowBitmap(Bitmap bitmap);

    private WaveOutEvent waveOut;              // NAudio playback device
    private BufferedWaveProvider waveProvider; // PCM buffer feeding waveOut
    bool CanRun;                               // decode-loop run flag; cleared by Stop()
    private SwsContext* pSwsContext;           // video pixel-format conversion context
    private SwrContext* aSwrContext;           // audio resampling context
    private AVCodecContext* _aCodecContext;    // deliberately left null - see note in DecodeVideo
    private AVFormatContext* _pFormatContext;  // demuxer context; closed in DecodeVideo or Dispose
    private bool _disposed = false;

    /// <summary>
    /// Opens <paramref name="inputFile"/> (file path or network URL), decodes its video and
    /// audio streams, hands each video frame to <paramref name="show"/> as a BGR24 Bitmap and
    /// plays the audio through NAudio at 44.1 kHz stereo S16. Blocks the calling thread until
    /// the stream ends or <see cref="Stop"/> is called; finally invokes show(null).
    /// </summary>
    /// <param name="inputFile">Media file path or URL understood by FFmpeg.</param>
    /// <param name="show">Receives decoded frames; must tolerate a final null frame.</param>
    /// <exception cref="ApplicationException">The file cannot be opened or a decoder fails.</exception>
    public void DecodeVideo(string inputFile, ShowBitmap show)
    {
        CanRun = true;
        int error;
        FFmpegBinariesHelper.RegisterFFmpegBinaries();
        // Deprecated no-ops on FFmpeg >= 4.0; kept for compatibility with older builds.
        ffmpeg.av_register_all();
        ffmpeg.avcodec_register_all();
        ffmpeg.avformat_network_init();

        AVFormatContext* pFormatContext = ffmpeg.avformat_alloc_context();
        _pFormatContext = pFormatContext;
        error = ffmpeg.avformat_open_input(&pFormatContext, inputFile, null, null);
        if (error != 0) throw new ApplicationException("无法打开文件");
        error = ffmpeg.avformat_find_stream_info(pFormatContext, null);
        if (error < 0) throw new ApplicationException("无法获取流信息");

        // Locate the video and audio streams (if several, the last one of each kind wins).
        // FIX: the loop condition was corrupted in the original ("i nb_streams").
        AVStream* pStream = null, aStream = null;
        for (var i = 0; i < pFormatContext->nb_streams; i++)
        {
            if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
            {
                pStream = pFormatContext->streams[i];
            }
            else if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
            {
                aStream = pFormatContext->streams[i];
            }
        }

        // FIX: both locals must be definitely assigned before "&pCodecContext" is legal C#.
        AVCodecContext pCodecContext = default, aCodecContext = default;
        var dstData = new byte_ptrArray4();
        var dstLinesize = new int_array4();
        int width = 0, height = 0;
        IntPtr convertedFrameBufferPtr = IntPtr.Zero;
        int pStream_num = -1, aStream_num = -1;

        if (pStream != null)
        {
            // Decode through a by-value copy of the stream's codec context (original sample's pattern).
            pCodecContext = *pStream->codec;
            var pCodecId = pCodecContext.codec_id;
            pStream_num = pStream->index;
            width = pCodecContext.width;
            height = pCodecContext.height;
            var sourcePixFmt = pCodecContext.pix_fmt;
            var destinationPixFmt = AVPixelFormat.AV_PIX_FMT_BGR24;
            // Some H.264 streams report no pixel format before decoding starts; assume YUV420P.
            if (sourcePixFmt == AVPixelFormat.AV_PIX_FMT_NONE && pCodecId == AVCodecID.AV_CODEC_ID_H264)
            {
                sourcePixFmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
            }
            pSwsContext = ffmpeg.sws_getContext(width, height, sourcePixFmt, width, height, destinationPixFmt, ffmpeg.SWS_FAST_BILINEAR, null, null, null);
            if (pSwsContext == null) throw new ApplicationException("无法初始化转换上下文。");
            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixFmt, width, height, 1);
            convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
            ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte*)convertedFrameBufferPtr, destinationPixFmt, width, height, 1);
            AVCodec* pCodec = ffmpeg.avcodec_find_decoder(pCodecId);
            if (pCodec == null) throw new ApplicationException("找不到视频解码器");
            if (ffmpeg.avcodec_open2(&pCodecContext, pCodec, null) < 0)
            {
                throw new ApplicationException("无法打开视频解码器");
            }
            // FIX: removed leftover debug MessageBox.Show(pCodec->type.ToString()).
        }

        if (aStream != null)
        {
            aCodecContext = *aStream->codec;
            // NOTE: the original stored &aCodecContext (address of this stack local) in
            // _aCodecContext and later avcodec_free_context'd it in Dispose - a dangling
            // pointer bug. The field is now intentionally left null; the copy lives only
            // for the duration of this method.
            var aCodecId = aCodecContext.codec_id;
            aStream_num = aStream->index;

            // Resample whatever the source delivers to 44.1 kHz stereo S16 for NAudio.
            aSwrContext = ffmpeg.swr_alloc();
            ffmpeg.av_opt_set_int(aSwrContext, "in_channel_layout", (long)aCodecContext.channel_layout, 0);
            ffmpeg.av_opt_set_int(aSwrContext, "out_channel_layout", (long)ffmpeg.AV_CH_LAYOUT_STEREO, 0);
            ffmpeg.av_opt_set_int(aSwrContext, "in_sample_rate", aCodecContext.sample_rate, 0);
            ffmpeg.av_opt_set_int(aSwrContext, "out_sample_rate", 44100, 0);
            ffmpeg.av_opt_set_sample_fmt(aSwrContext, "in_sample_fmt", aCodecContext.sample_fmt, 0);
            ffmpeg.av_opt_set_sample_fmt(aSwrContext, "out_sample_fmt", AVSampleFormat.AV_SAMPLE_FMT_S16, 0);
            ffmpeg.swr_init(aSwrContext);

            waveOut = new WaveOutEvent();
            waveProvider = new BufferedWaveProvider(new WaveFormat(44100, 16, 2));
            waveOut.Init(waveProvider);
            waveOut.Play();

            AVCodec* aCodec = ffmpeg.avcodec_find_decoder(aCodecId);
            if (aCodec == null) throw new ApplicationException("找不到音频解码器");
            if (ffmpeg.avcodec_open2(&aCodecContext, aCodec, null) < 0)
            {
                throw new ApplicationException("无法打开音频解码器");
            }
        }

        AVFrame* _AVFrame = ffmpeg.av_frame_alloc();
        AVPacket* pPacket = ffmpeg.av_packet_alloc();
        int videoX = 0; // becomes 1 once a video packet is seen; disables the audio pacing sleep
        try
        {
            // FIX: short-circuit && instead of non-short-circuit & on booleans.
            while (ffmpeg.av_read_frame(pFormatContext, pPacket) >= 0 && CanRun)
            {
                if (pPacket->stream_index == pStream_num)
                {
                    int response = ffmpeg.avcodec_send_packet(&pCodecContext, pPacket);
                    if (response < 0)
                    {
                        throw new ApplicationException("将数据包发送到解码器时出错");
                    }
                    while (response >= 0)
                    {
                        response = ffmpeg.avcodec_receive_frame(&pCodecContext, _AVFrame);
                        // EAGAIN: the decoder needs another packet.
                        // FIX: the original's "continue if CanRun" made EAGAIN fall through to
                        // the error throw when Stop() had been called.
                        if (response == ffmpeg.AVERROR(ffmpeg.EAGAIN)) break;
                        if (response == ffmpeg.AVERROR_EOF) break;
                        if (response < 0) throw new ApplicationException("从解码器接收视频帧时出错");
                        ffmpeg.sws_scale(pSwsContext, _AVFrame->data, _AVFrame->linesize, 0, height, dstData, dstLinesize);
                        // The Bitmap wraps convertedFrameBufferPtr; the UI callback disposes it.
                        var bitmap = new Bitmap(width, height, dstLinesize[0], PixelFormat.Format24bppRgb, convertedFrameBufferPtr);
                        show(bitmap);
                    }
                    ffmpeg.av_packet_unref(pPacket);
                    ffmpeg.av_frame_unref(_AVFrame);
                    videoX = 1;
                }
                else if (pPacket->stream_index == aStream_num)
                {
                    int response = ffmpeg.avcodec_send_packet(&aCodecContext, pPacket);
                    if (response < 0 && response != ffmpeg.AVERROR(ffmpeg.EAGAIN))
                    {
                        break;
                    }
                    while (response >= 0)
                    {
                        response = ffmpeg.avcodec_receive_frame(&aCodecContext, _AVFrame);
                        if (response == ffmpeg.AVERROR(ffmpeg.EAGAIN)) break;
                        if (response == ffmpeg.AVERROR_EOF) break;
                        if (response < 0) throw new ApplicationException("从解码器接收音频帧时出错");

                        // FIX: the original leaked Marshal.AllocHGlobal(19200 * 2) bytes per
                        // audio frame by overwriting the pointer with av_samples_alloc's buffer.
                        byte* convertedData = null;
                        int outSamples = ffmpeg.swr_get_out_samples(aSwrContext, _AVFrame->nb_samples);
                        ffmpeg.av_samples_alloc(&convertedData, null, 2, outSamples, AVSampleFormat.AV_SAMPLE_FMT_S16, 0);
                        outSamples = ffmpeg.swr_convert(aSwrContext, &convertedData, outSamples, _AVFrame->extended_data, _AVFrame->nb_samples);
                        if (outSamples > 0) // swr_convert returns a negative error code on failure
                        {
                            int bufferSize = outSamples * 2 * 2; // samples * 2 channels * 2 bytes (S16)
                            byte[] buffer = new byte[bufferSize];
                            Marshal.Copy((IntPtr)convertedData, buffer, 0, bufferSize);
                            waveProvider.AddSamples(buffer, 0, bufferSize);
                            // Audio-only playback: throttle so the NAudio buffer doesn't overflow.
                            if (bufferSize * 2 <= waveProvider.BufferedBytes && videoX == 0)
                            {
                                Thread.Sleep(20);
                            }
                        }
                        ffmpeg.av_freep(&convertedData);
                    }
                    ffmpeg.av_packet_unref(pPacket);
                    ffmpeg.av_frame_unref(_AVFrame);
                }
            }
        }
        catch (ApplicationException)
        {
            // Best-effort playback: a mid-stream decode error simply ends the session.
        }
        finally
        {
            ffmpeg.av_packet_unref(pPacket);
            ffmpeg.av_frame_unref(_AVFrame);
        }

        show(null); // tell the UI that playback is finished
        if (convertedFrameBufferPtr != IntPtr.Zero) Marshal.FreeHGlobal(convertedFrameBufferPtr);
        // FIX: av_frame_free/av_packet_free instead of av_free (they release internal
        // buffers too); the packet was never freed at all in the original.
        ffmpeg.av_frame_free(&_AVFrame);
        ffmpeg.av_packet_free(&pPacket);
        ffmpeg.sws_freeContext(pSwsContext);
        pSwsContext = null;
        ffmpeg.avformat_close_input(&pFormatContext);
        _pFormatContext = null; // FIX: prevent Dispose from closing the already-freed context
    }

    /// <summary>Asks the decode loop in <see cref="DecodeVideo"/> to exit after the current packet.</summary>
    public void Stop()
    {
        CanRun = false;
    }

    /// <summary>Releases the audio device and any native FFmpeg contexts this instance still owns.</summary>
    public void Dispose()
    {
        if (_disposed) return;
        waveOut?.Stop();
        waveOut?.Dispose();
        if (aSwrContext != null)
        {
            var swrContext = aSwrContext;
            ffmpeg.swr_free(&swrContext);
            aSwrContext = null;
        }
        // _aCodecContext is normally null (see DecodeVideo); guard kept defensively.
        if (_aCodecContext != null)
        {
            ffmpeg.avcodec_close(_aCodecContext);
            var pAudioCodecContext = _aCodecContext;
            ffmpeg.avcodec_free_context(&pAudioCodecContext);
            _aCodecContext = null;
        }
        if (pSwsContext != null)
        {
            ffmpeg.sws_freeContext(pSwsContext);
            pSwsContext = null;
        }
        if (_pFormatContext != null)
        {
            var pFormatContext = _pFormatContext;
            ffmpeg.avformat_close_input(&pFormatContext);
            _pFormatContext = null;
        }
        _disposed = true;
        GC.SuppressFinalize(this);
    }

    ~videoplay()
    {
        // NOTE(review): touching managed objects (waveOut) from a finalizer is unreliable;
        // callers should call Dispose() explicitly. Kept for parity with the original design.
        Dispose();
    }
}
}
Form中调用代码
using System;
using System.Drawing;
using System.Threading;
using System.Windows.Forms;
namespace videoplayer
{
public partial class Form1 : Form
{
/// <summary>Creates the main form and initializes the designer-generated controls.</summary>
public Form1()
{
InitializeComponent();
}
videoplay _videoplayer = new videoplay();
Thread thPlayer;
/// <summary>
/// Toggles playback: starts a background decode thread when idle, or requests a
/// stop when one is running. The button is re-enabled (and its text reset) by
/// DeCoding's finally block on the UI thread.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    button1.Enabled = false; // debounce until the new state is established
    if (thPlayer != null)
    {
        // Playing: ask the decode loop to exit; DeCoding restores the UI state.
        _videoplayer.Stop();
        thPlayer = null;
    }
    else
    {
        thPlayer = new Thread(DeCoding) { IsBackground = true };
        thPlayer.Start();
        button1.Text = "停止播放";
        button1.Enabled = true;
    }
}
/// <summary>
/// Background-thread entry point: decodes the URL from textBox1 and displays
/// frames in pictureBox1, then restores the play button when finished.
/// </summary>
private unsafe void DeCoding()
{
    // FIX: read the TextBox on the UI thread - WinForms controls must not be
    // accessed from a background thread.
    string url = (string)this.Invoke(new Func<string>(() => textBox1.Text));
    try
    {
        Bitmap oldBmp = null;
        // Swap the displayed frame on the UI thread and dispose the previous
        // bitmap so we don't leak one Bitmap per frame.
        videoplay.ShowBitmap show = (bmp) =>
        {
            this.Invoke(new MethodInvoker(() =>
            {
                this.pictureBox1.Image = bmp;
                if (oldBmp != null)
                {
                    oldBmp.Dispose();
                }
                oldBmp = bmp;
            }));
        };
        _videoplayer.DecodeVideo(url, show);
    }
    finally
    {
        // Always restore UI state, even when decoding threw.
        _videoplayer.Stop();
        thPlayer = null;
        this.Invoke(new MethodInvoker(() =>
        {
            button1.Text = "开始播放";
            button1.Enabled = true;
        }));
    }
}
最后附上完整程序包下载链接:https://download.csdn.net/download/weixin_56483593/90578245
程序包中视频流、音频流的核心代码全部完成,几乎所有代码都配有详细的注释说明,层次分明、清晰,便于快速上手,无需花大量时间观看教学视频。如需硬件加速或增加其它功能,请参阅 FFmpeg.AutoGen 官方文档。
网上一直没有这个完整资源,只好自己写了一个,如果对你有帮助请点个赞。