播放音视频的关键:视频的格式是 H.264,音频的格式是 AAC。使用 FFmpeg 探测流的方式来实现音视频流的解码播放。

数据处理逻辑:H.264 → YUV,AAC → PCM。

SDL2 工具类

using sdl2;
using system;
using system.collections.generic;
using system.linq;
using system.runtime.interopservices;
using system.text;
using system.threading.tasks; 
namespace cvnetvideo
{
 /// <summary>
 /// Thin wrapper around SDL2 that renders decoded YUV420P frames into an
 /// existing native window handle (e.g. a WinForms control).
 /// </summary>
 public unsafe class sdlhelper
 {
  private intptr screen;      // SDL window handle
  private intptr sdlrenderer; // SDL renderer handle
  private intptr sdltexture;  // IYUV streaming texture that receives the frames
  sdl.sdl_rect sdlrect;       // destination rectangle for texture updates
  sdl.sdl_event sdlevent;     // reserved for event polling
  bool isinit = false;        // whether the SDL subsystems were initialized
  // Dedicated lock object: locking on 'this' is an anti-pattern because
  // external code holding a lock on the instance could deadlock us.
  private readonly object gate = new object();
  public sdlhelper()
  {
  }
  // Not implemented yet.
  public void sdl_maximizewindow()
  {
  }
  /// <summary>
  /// Initializes the SDL subsystems once, then (re)creates the window from the
  /// supplied native handle plus a renderer and an IYUV streaming texture.
  /// </summary>
  /// <param name="width">frame width in pixels</param>
  /// <param name="height">frame height in pixels</param>
  /// <param name="intptr">native window handle to render into</param>
  /// <returns>0 on success, -1 on failure</returns>
  public int sdl_init(int width, int height, intptr intptr)
  {
   lock (gate)
   {
    if (!isinit)
    {
     // video + audio + timer subsystems
     if (sdl.sdl_init(sdl.sdl_init_video | sdl.sdl_init_audio | sdl.sdl_init_timer) < 0)
     {
      console.writeline("could not initialize sdl - {0}\n", sdl.sdl_geterror());
      return -1;
     }
     isinit = true;
    }
    // Release any previously created texture/renderer/window before
    // re-creating them, and zero the handles so they are never reused.
    if (sdltexture != intptr.zero)
    {
     sdl.sdl_destroytexture(sdltexture);
     sdltexture = intptr.zero;
    }
    if (sdlrenderer != intptr.zero)
    {
     sdl.sdl_destroyrenderer(sdlrenderer);
     sdlrenderer = intptr.zero;
    }
    if (screen != intptr.zero)
    {
     // fix: the old code also called sdl_raisewindow/sdl_restorewindow here,
     // i.e. on the window handle it had just destroyed
     sdl.sdl_destroywindow(screen);
     screen = intptr.zero;
    }
    // Create the display window from the existing native control handle.
    screen = sdl.sdl_createwindowfrom(intptr);
    if (screen == intptr.zero)
    {
     console.writeline("can't creat a window:{0}\n", sdl.sdl_geterror());
     return -1;
    }
    sdl.sdl_showwindow(screen);
    sdl.sdl_setwindowsize(screen, width, height);
    // Renderer plus a streaming texture sized to the video frame.
    sdlrenderer = sdl.sdl_createrenderer(screen, -1, sdl.sdl_rendererflags.sdl_renderer_accelerated);
    sdltexture = sdl.sdl_createtexture(sdlrenderer, sdl.sdl_pixelformat_iyuv, (int)sdl.sdl_textureaccess.sdl_textureaccess_streaming, width, height);
    return 0;
   }
  }
  /// <summary>
  /// Uploads one YUV frame into the texture and presents it.
  /// </summary>
  /// <param name="width">frame width in pixels</param>
  /// <param name="height">frame height in pixels</param>
  /// <param name="pixels">pointer to the YUV420P pixel data</param>
  /// <param name="pixelssize">size of the pixel buffer in bytes (unused)</param>
  /// <param name="pitch">line size of the Y plane in bytes</param>
  /// <returns>0 on success</returns>
  public int sdl_display(int width, int height, intptr pixels, int pixelssize,
   int pitch)
  {
   lock (gate)
   {
    sdlrect.x = 0;
    sdlrect.y = 0;
    sdlrect.w = width;
    sdlrect.h = height;
    // fix: passing intptr.zero as the update rect produced the green-shadow
    // artifact described in the article; update exactly the frame rectangle
    sdl.sdl_updatetexture(sdltexture, ref sdlrect, pixels, pitch);
    // fix: sdl_renderclear takes the renderer, not the texture
    sdl.sdl_renderclear(sdlrenderer);
    sdl.sdl_rendercopy(sdlrenderer, sdltexture, intptr.zero, intptr.zero);
    // present the frame
    sdl.sdl_renderpresent(sdlrenderer);
    return 0;
   }
  }
 }
 /// <summary>
 /// Plays queued PCM chunks through the SDL audio device using a pull callback.
 /// </summary>
 public unsafe class sdlaudio
 {
  // One buffered chunk of PCM data.
  class aa
  {
   public byte[] pcm;
   public int len;
  }
  int lastindex = 0;
  // Dedicated lock object guarding 'data' (locking on 'this' is an anti-pattern).
  private readonly object gate = new object();
  // FIFO queue of PCM chunks awaiting playback.
  private list<aa> data = new list<aa>();
  // Kept in a field so the delegate passed to SDL is not garbage-collected.
  sdl.sdl_audiocallback callback;
  /// <summary>
  /// Copies one chunk of PCM data and queues it for playback.
  /// </summary>
  /// <param name="pcm">pointer to the PCM bytes</param>
  /// <param name="len">number of bytes to copy</param>
  public void playaudio(intptr pcm, int len)
  {
   byte[] bts = new byte[len];
   marshal.copy(pcm, bts, 0, len);
   lock (gate)
   {
    data.add(new aa
    {
     len = len,
     pcm = bts
    });
   }
  }
  // SDL audio callback: fills 'stream' with up to 'len' bytes from the head of
  // the queue, zero-padding when the queue is empty or the chunk is short.
  // fix: the original read and mutated 'data' without taking the lock that
  // playaudio takes, and contained stray non-comment text plus a pointless
  // sdl_rwfrommem call.
  void sdl_audiocallback(intptr userdata, intptr stream, int len)
  {
   aa chunk = null;
   lock (gate)
   {
    if (data.count > 0)
    {
     chunk = data[0];
     data.removeat(0);
    }
   }
   if (chunk == null)
   {
    // No data yet: output silence.
    for (int i = 0; i < len; i++)
    {
     ((byte*)stream)[i] = 0;
    }
    return;
   }
   for (int i = 0; i < len; i++)
   {
    if (chunk.len > i)
    {
     ((byte*)stream)[i] = chunk.pcm[i];
    }
    else
     ((byte*)stream)[i] = 0;
   }
  }
  /// <summary>
  /// Opens the SDL audio device for 8 kHz, 16-bit, mono PCM and starts playback.
  /// </summary>
  /// <returns>0 on success, -1 on failure</returns>
  public int sdl_init()
  {
   callback = sdl_audiocallback;
   sdl.sdl_audiospec wanted_spec = new sdl.sdl_audiospec();
   wanted_spec.freq = 8000;            // sample rate in Hz
   wanted_spec.format = sdl.audio_s16; // signed 16-bit samples
   wanted_spec.channels = 1;           // mono
   wanted_spec.silence = 0;
   wanted_spec.samples = 320;          // device buffer size in sample frames
   wanted_spec.callback = callback;
   if (sdl.sdl_openaudio(ref wanted_spec, intptr.zero) < 0)
   {
    console.writeline("can't open audio.");
    return -1;
   }
   // 0 = unpause: start pulling data through the callback immediately.
   sdl.sdl_pauseaudio(0);
   return 0;
  }
 }
}

SDL 实现了基础的播放功能。

C# MP4 文件音视频编解码器类

using cv.video.base;
using cv.video.base.ffmpeg;
using ffmpeg.autogen;
using jx;
using system;
using system.collections.generic;
using system.linq;
using system.runtime.interopservices;
using system.text;
using system.threading;
using system.threading.tasks;
namespace cvnetvideo.codec.video
{
/// <summary>
/// Decodes an MP4 file with FFmpeg and plays it through SDL, using one thread
/// for video (H.264 -> YUV420P) and one thread for audio (AAC -> PCM).
/// </summary>
public unsafe class jt1078codecformp4
{
    /// <summary>
    /// Indicates whether a decode loop is currently running.
    /// </summary>
    public bool isrun { get; protected set; }
    /// <summary>
    /// Video decode thread.
    /// </summary>
    private thread threadvideo;
    /// <summary>
    /// Audio decode thread.
    /// </summary>
    private thread threadaudio;
    /// <summary>
    /// Set to true to make the decode loops exit.
    /// </summary>
    private bool exit_thread = false;
    /// <summary>
    /// Set to true to make the decode loops pause.
    /// </summary>
    private bool pause_thread = false;
    /// <summary>
    /// Index of the video stream in the container.
    /// </summary>
    private int videoindex = -1;
    /// <summary>
    /// Index of the audio stream in the container.
    /// </summary>
    private int audioindex = -1;
    /// <summary>
    /// Decodes the H.264 video stream to YUV420P and renders it with SDL.
    /// Blocks until end of file or stop() is called.
    /// </summary>
    /// <param name="filename">path of the media file</param>
    /// <param name="sdlvideo">SDL video renderer</param>
    /// <returns>0 on success, -1 on failure</returns>
    public unsafe int runvideo(string filename, sdlhelper sdlvideo)
    {
        isrun = true;
        exit_thread = false;
        pause_thread = false;
        threadvideo = thread.currentthread;
        int error, frame_count = 0;
        int got_picture, ret;
        avformatcontext* ofmt_ctx = null;
        try
        {
            // Register codecs (required by older FFmpeg API versions).
            ffmpeg.avcodec_register_all();
            ofmt_ctx = ffmpeg.avformat_alloc_context();
            // Open the media file.
            error = ffmpeg.avformat_open_input(&ofmt_ctx, filename, null, null);
            if (error != 0)
            {
                throw new applicationexception(ffmpegbinarieshelper.geterrormessage(error));
            }
            // Locate the video stream.
            for (int i = 0; i < ofmt_ctx->nb_streams; i++)
            {
                if (ofmt_ctx->streams[i]->codec->codec_type == avmediatype.avmedia_type_video)
                {
                    videoindex = i;
                    console.writeline("video.............." + videoindex);
                }
            }
            if (videoindex == -1)
            {
                console.writeline("couldn't find a video stream.(没有找到视频流)");
                return -1;
            }
            if (videoindex > -1)
            {
                // Decoder context of the video stream.
                avcodeccontext* pcodecctx = ofmt_ctx->streams[videoindex]->codec;
                // Find the decoder matching the stream's codec id.
                avcodec* pcodec = ffmpeg.avcodec_find_decoder(pcodecctx->codec_id);
                if (pcodec == null)
                {
                    console.writeline("没有找到编码器");
                    return -1;
                }
                if (ffmpeg.avcodec_open2(pcodecctx, pcodec, null) < 0)
                {
                    console.writeline("编码器无法打开");
                    return -1;
                }
                console.writeline("find a video stream.channel=" + videoindex);
                // Dump basic stream information.
                var format = ofmt_ctx->iformat->name->tostring();
                var len = (ofmt_ctx->duration) / 1000000; // duration in seconds
                var width = pcodecctx->width;
                var height = pcodecctx->height;
                console.writeline("video format:" + format);
                console.writeline("video length:" + len);
                console.writeline("video width&height:width=" + width + " height=" + height);
                console.writeline("video codec name:" + pcodec->name->tostring());
                // packet holds compressed data (H.264); the frames hold decoded pixels (YUV).
                avpacket* packet = (avpacket*)ffmpeg.av_malloc((ulong)sizeof(avpacket));
                avframe* pframe = ffmpeg.av_frame_alloc();
                avframe* pframeyuv = ffmpeg.av_frame_alloc();
                // Allocate the YUV420P output buffer and bind pframeyuv's data
                // pointers to it.
                int out_buffer_size = ffmpeg.avpicture_get_size(avpixelformat.av_pix_fmt_yuv420p, pcodecctx->width, pcodecctx->height);
                byte* out_buffer = (byte*)ffmpeg.av_malloc((ulong)out_buffer_size);
                ffmpeg.avpicture_fill((avpicture*)pframeyuv, out_buffer, avpixelformat.av_pix_fmt_yuv420p, pcodecctx->width, pcodecctx->height);
                // Conversion/scaling context.
                // fix: use the decoder's actual pixel format as the source
                // instead of hard-coding yuv420p (identical for typical H.264,
                // correct for everything else).
                swscontext* sws_ctx = ffmpeg.sws_getcontext(pcodecctx->width, pcodecctx->height, pcodecctx->pix_fmt, pcodecctx->width, pcodecctx->height, avpixelformat.av_pix_fmt_yuv420p, ffmpeg.sws_bicubic, null, null, null);
                while (ffmpeg.av_read_frame(ofmt_ctx, packet) >= 0)
                {
                    if (exit_thread)
                    {
                        break;
                    }
                    // Busy-wait while paused.
                    while (pause_thread)
                    {
                        thread.sleep(100);
                    }
                    // Only decode packets belonging to the video stream.
                    if (packet->stream_index == videoindex)
                    {
                        ret = ffmpeg.avcodec_decode_video2(pcodecctx, pframe, &got_picture, packet);
                        if (ret < 0)
                        {
                            console.writeline("视频解码错误");
                            return -1;
                        }
                        if (got_picture > 0)
                        {
                            frame_count++;
                            console.writeline("视频帧数:第 " + frame_count + " 帧");
                            // Convert the decoded frame to YUV420P.
                            ffmpeg.sws_scale(sws_ctx, pframe->data, pframe->linesize, 0, pcodecctx->height, pframeyuv->data, pframeyuv->linesize);
                            // Hand the YUV buffer to SDL for display.
                            sdlvideo.sdl_display(pcodecctx->width, pcodecctx->height, (intptr)out_buffer, out_buffer_size, pframeyuv->linesize[0]);
                        }
                    }
                    ffmpeg.av_free_packet(packet);
                }
                // fix: these resources used to be leaked on every run.
                ffmpeg.sws_freecontext(sws_ctx);
                ffmpeg.av_frame_free(&pframeyuv);
                ffmpeg.av_frame_free(&pframe);
                ffmpeg.av_free(out_buffer);
                ffmpeg.av_free(packet);
                ffmpeg.avcodec_close(pcodecctx);
            }
        }
        catch (exception ex)
        {
            console.writeline(ex);
        }
        finally
        {
            // fix: the old check was '&ofmt_ctx != null' — the address of a
            // local, which is always non-null. Test the pointer itself.
            if (ofmt_ctx != null)
            {
                ffmpeg.avformat_close_input(&ofmt_ctx); // close the input file
            }
            // fix: early 'return -1' paths used to leave isrun stuck at true,
            // because the assignment sat after the finally block.
            isrun = false;
        }
        return 0;
    }
    /// <summary>
    /// Decodes the AAC audio stream, resamples it to 16-bit mono PCM at
    /// 44100 Hz and plays it through SDL. Blocks until EOF or stop().
    /// </summary>
    /// <param name="filename">path of the media file</param>
    /// <param name="sdlaudio">SDL audio player</param>
    /// <returns>0 on success, -1 on failure</returns>
    public unsafe int runaudio(string filename, sdlaudio sdlaudio)
    {
        isrun = true;
        exit_thread = false;
        pause_thread = false;
        threadaudio = thread.currentthread;
        int error, frame_count = 0;
        int got_frame, ret;
        avformatcontext* ofmt_ctx = null;
        try
        {
            ffmpeg.avcodec_register_all();
            ofmt_ctx = ffmpeg.avformat_alloc_context();
            error = ffmpeg.avformat_open_input(&ofmt_ctx, filename, null, null);
            if (error != 0)
            {
                throw new applicationexception(ffmpegbinarieshelper.geterrormessage(error));
            }
            // Locate the audio stream.
            for (int i = 0; i < ofmt_ctx->nb_streams; i++)
            {
                if (ofmt_ctx->streams[i]->codec->codec_type == avmediatype.avmedia_type_audio)
                {
                    audioindex = i;
                    console.writeline("audio.............." + audioindex);
                }
            }
            if (audioindex == -1)
            {
                console.writeline("couldn't find a audio stream.(没有找到音频流)");
                return -1;
            }
            if (audioindex > -1)
            {
                // Decoder context of the audio stream.
                avcodeccontext* pcodectx = ofmt_ctx->streams[audioindex]->codec;
                avcodec* pcodec = ffmpeg.avcodec_find_decoder(pcodectx->codec_id);
                if (pcodec == null)
                {
                    console.writeline("没有找到编码器");
                    return -1;
                }
                if (ffmpeg.avcodec_open2(pcodectx, pcodec, null) < 0)
                {
                    console.writeline("编码器无法打开");
                    return -1;
                }
                console.writeline("find a audio stream. channel=" + audioindex);
                avpacket* packet = (avpacket*)ffmpeg.av_malloc((ulong)(sizeof(avpacket)));
                avframe* frame = ffmpeg.av_frame_alloc();
                // Resampler: normalize decoder output to 16-bit mono PCM @ 44100 Hz.
                swrcontext* swrctx = ffmpeg.swr_alloc();
                avsampleformat in_sample_fmt = pcodectx->sample_fmt;          // input sample format
                avsampleformat out_sample_fmt = avsampleformat.av_sample_fmt_s16; // 16-bit PCM out
                int in_sample_rate = pcodectx->sample_rate;                   // input sample rate
                int out_sample_rate = 44100;                                  // output sample rate
                long in_ch_layout = (long)pcodectx->channel_layout;           // input channel layout
                int out_ch_layout = ffmpeg.av_ch_layout_mono;                 // output: mono
                ffmpeg.swr_alloc_set_opts(swrctx, out_ch_layout, out_sample_fmt, out_sample_rate, in_ch_layout, in_sample_fmt, in_sample_rate, 0, null);
                ffmpeg.swr_init(swrctx);
                int out_channel_nb = ffmpeg.av_get_channel_layout_nb_channels((ulong)out_ch_layout);
                // PCM output buffer (2 bytes per sample * 44100 samples).
                byte* out_buffer = (byte*)ffmpeg.av_malloc(2 * 44100);
                // Read one compressed packet at a time.
                while (ffmpeg.av_read_frame(ofmt_ctx, packet) >= 0)
                {
                    if (exit_thread)
                    {
                        break;
                    }
                    while (pause_thread)
                    {
                        thread.sleep(100);
                    }
                    if (packet->stream_index == audioindex)
                    {
                        ret = ffmpeg.avcodec_decode_audio4(pcodectx, frame, &got_frame, packet);
                        if (ret < 0)
                        {
                            console.writeline("音频解码失败");
                            return -1;
                        }
                        if (got_frame > 0)
                        {
                            frame_count++;
                            console.writeline("音频帧数:第 "+ frame_count + " 帧");
                            var data_ = frame->data;
                            // NOTE(review): swr_convert's out_count is in samples per
                            // channel; passing 2*44100 assumes a frame never produces
                            // that many output samples — confirm against frame sizes.
                            ffmpeg.swr_convert(swrctx, &out_buffer, 2 * 44100, (byte**)&data_, frame->nb_samples);
                            // Size in bytes of the resampled audio for this frame.
                            int out_buffer_size = ffmpeg.av_samples_get_buffer_size(null, out_channel_nb, frame->nb_samples, out_sample_fmt, 1);
                            sdlaudio.playaudio((intptr)out_buffer, out_buffer_size);
                        }
                    }
                    ffmpeg.av_free_packet(packet);
                }
                // fix: these resources used to be leaked on every run.
                ffmpeg.swr_free(&swrctx);
                ffmpeg.av_frame_free(&frame);
                ffmpeg.av_free(out_buffer);
                ffmpeg.av_free(packet);
                ffmpeg.avcodec_close(pcodectx);
            }
        }
        catch (exception ex)
        {
            console.writeline(ex);
        }
        finally
        {
            // fix: was '&ofmt_ctx != null' (always true); test the pointer itself.
            if (ofmt_ctx != null)
            {
                ffmpeg.avformat_close_input(&ofmt_ctx); // close the input file
            }
            // fix: keep isrun accurate even on early 'return -1' exits.
            isrun = false;
        }
        return 0;
    }
    /// <summary>
    /// Starts one background thread for video and one for audio.
    /// </summary>
    /// <param name="filename">path of the media file</param>
    /// <param name="sdlvideo">SDL video renderer</param>
    /// <param name="sdlaudio">SDL audio player</param>
    public void start(string filename, sdlhelper sdlvideo, sdlaudio sdlaudio)
    {
        // Video thread.
        threadvideo = new thread(() =>
        {
            try
            {
                runvideo(filename, sdlvideo);
            }
            catch (exception ex)
            {
                sq.base.errorlog.writelog4ex("jt1078codecformp4.run video", ex);
            }
        });
        threadvideo.isbackground = true;
        threadvideo.start();
        // Audio thread.
        threadaudio = new thread(() =>
        {
            try
            {
                runaudio(filename, sdlaudio);
            }
            catch (exception ex)
            {
                sq.base.errorlog.writelog4ex("jt1078codecformp4.run audio", ex);
            }
        });
        threadaudio.isbackground = true;
        threadaudio.start();
    }
    /// <summary>
    /// Resumes after pause().
    /// </summary>
    public void goon()
    {
        pause_thread = false;
    }
    /// <summary>
    /// Pauses both decode loops.
    /// </summary>
    public void pause()
    {
        pause_thread = true;
    }
    /// <summary>
    /// Asks both decode loops to exit.
    /// </summary>
    public void stop()
    {
        exit_thread = true;
    }
}
}

暂停、继续、停止在此处的意义不大,因为解析的速度很快。

测试代码及效果图

/// <summary>
/// 播放
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void btnplay_click(object sender, eventargs e)
{
// 音视频媒体文件路径
string filename = "test.mp4";// 表示${project_home}/bin/debug/test.mp4
// 线程读取音视频流
jt1078codecformp4 = new jt1078codecformp4();
jt1078codecformp4.start(filename,sdlvideo,sdlaudio);
}

注意:此处出现绿色阴影是不正常的。修改播放方法的数据设置方式:

/// <summary>
/// 播放视频
/// </summary>
/// <param name="width"></param>
/// <param name="height"></param>
/// <param name="pixels"></param>
/// <param name="pixelssize"></param>
/// <param name="pitch"></param>
/// <returns></returns>
/// <summary>
/// Uploads one YUV frame into the texture and presents it (pause-aware version).
/// </summary>
/// <param name="width">frame width in pixels</param>
/// <param name="height">frame height in pixels</param>
/// <param name="pixels">pointer to the YUV420P pixel data</param>
/// <param name="pixelssize">size of the pixel buffer in bytes (unused)</param>
/// <param name="pitch">line size of the Y plane in bytes</param>
/// <returns>0 on success</returns>
public int sdl_display(int width, int height, intptr pixels, int pixelssize,
 int pitch)
{
    lock (this)
    {
        // Spin with a short delay while playback is paused.
        while (ispause)
        {
            sdl.sdl_delay(20);
        }
        // Destination rectangle covering the whole frame.
        sdlrect.x = 0;
        sdlrect.y = 0;
        sdlrect.w = width;
        sdlrect.h = height;
        // fix: passing intptr.zero as the rect caused the green-shadow artifact;
        // pass the explicit frame rectangle instead.
        sdl.sdl_updatetexture(sdltexture, ref sdlrect, pixels, pitch);
        // fix: sdl_renderclear takes the renderer, not the texture.
        sdl.sdl_renderclear(sdlrenderer);
        sdl.sdl_rendercopy(sdlrenderer, sdltexture, intptr.zero, intptr.zero);
        // Present the frame.
        sdl.sdl_renderpresent(sdlrenderer);
        return 0;
    }
    // fix: the original kept a large unreachable duplicate of this body after
    // the return, including a bare "delay 40ms" line that did not compile;
    // it has been removed.
}
}

关键代码:

sdl.sdl_updatetexture(sdltexture, ref sdlrect, pixels, pitch);

//sdl.sdl_updatetexture(sdltexture, intptr.zero, pixels, pitch);//此处代码导致播放窗口绿色阴影

修改后效果:

代码改进,采用同一个线程播放音视频:

/// <summary>
/// mp4播放(音视频使用同一个线程)
/// </summary>
/// <summary>
/// MP4 playback where audio and video are decoded on the same thread.
/// Call init() once, then start().
/// </summary>
public unsafe class jt1078codectoplaymp4two
{
    /// <summary>
    /// Indicates whether decoding is currently running.
    /// </summary>
    public bool isrun { get; protected set; }
    /// <summary>
    /// Worker thread running readandplay().
    /// </summary>
    private thread thread;
    /// <summary>
    /// Set to true to make the decode loop exit.
    /// </summary>
    private bool exit_thread = false;
    /// <summary>
    /// Set to true to make the decode loop pause.
    /// </summary>
    private bool pause_thread = false;
    /// <summary>
    /// Index of the video stream in the container.
    /// </summary>
    private int videoindex = -1;
    /// <summary>
    /// Index of the audio stream in the container.
    /// </summary>
    private int audioindex = -1;
    /// <summary>
    /// Whether init() completed successfully.
    /// </summary>
    private bool isinit = false;
    int error;
    avformatcontext* ofmt_ctx = null;
    avpacket* packet;
    // --- video decode state ---
    avcodeccontext* pcodecctx_video;
    avcodec* pcodec_video;
    avframe* pframe_video;
    avframe* pframeyuv_video;
    swscontext* sws_ctx_video;
    sdlhelper sdlvideo;
    sdlaudio sdlaudio;
    int out_buffer_size_video;
    byte* out_buffer_video;
    int video_frame_count, audio_frame_count;
    // --- audio decode state ---
    avcodeccontext* pcodectx_audio;
    avcodec* pcodec_audio;
    avframe* frame_audio;
    swrcontext* swrctx_audio;
    byte* out_buffer_audio;
    int out_buffer_size_audio;
    int out_channel_nb;
    avsampleformat out_sample_fmt;
    /// <summary>
    /// Opens the file, locates the audio and video streams and prepares the
    /// decoders, the YUV conversion context and the audio resampler.
    /// Must be called before start().
    /// </summary>
    /// <param name="filename">path of the media file</param>
    /// <param name="sdlvideo">SDL video renderer</param>
    /// <param name="sdlaudio">SDL audio player</param>
    /// <returns>0 on success, -1 on failure</returns>
    public int init(string filename, sdlhelper sdlvideo, sdlaudio sdlaudio)
    {
        avformatcontext* ofmt_ctx;
        // Register codecs (required by older FFmpeg API versions).
        ffmpeg.avcodec_register_all();
        ofmt_ctx = ffmpeg.avformat_alloc_context();
        this.ofmt_ctx = ofmt_ctx;
        // Open the media file.
        error = ffmpeg.avformat_open_input(&ofmt_ctx, filename, null, null);
        if (error != 0)
        {
            throw new applicationexception(ffmpegbinarieshelper.geterrormessage(error));
        }
        // Locate the audio and video streams.
        for (int i = 0; i < ofmt_ctx->nb_streams; i++)
        {
            if (ofmt_ctx->streams[i]->codec->codec_type == avmediatype.avmedia_type_video)
            {
                videoindex = i;
                console.writeline("video.............." + videoindex);
            }
            if (ofmt_ctx->streams[i]->codec->codec_type == avmediatype.avmedia_type_audio)
            {
                audioindex = i;
                console.writeline("audio.............." + audioindex);
            }
        }
        if (videoindex == -1)
        {
            console.writeline("couldn't find a video stream.(没有找到视频流)");
            return -1;
        }
        if (audioindex == -1)
        {
            console.writeline("couldn't find a audio stream.(没有找到音频流)");
            return -1;
        }
        #region video initialization
        if (videoindex > -1)
        {
            // Decoder context of the video stream.
            pcodecctx_video = ofmt_ctx->streams[videoindex]->codec;
            pcodec_video = ffmpeg.avcodec_find_decoder(pcodecctx_video->codec_id);
            if (pcodec_video == null)
            {
                console.writeline("没有找到编码器");
                return -1;
            }
            if (ffmpeg.avcodec_open2(pcodecctx_video, pcodec_video, null) < 0)
            {
                console.writeline("编码器无法打开");
                return -1;
            }
            console.writeline("find a video stream.channel=" + videoindex);
            // Dump basic stream information.
            var format = ofmt_ctx->iformat->name->tostring();
            var len = (ofmt_ctx->duration) / 1000000; // duration in seconds
            var width = pcodecctx_video->width;
            var height = pcodecctx_video->height;
            console.writeline("video format:" + format);
            console.writeline("video length:" + len);
            console.writeline("video width&height:width=" + width + " height=" + height);
            console.writeline("video codec name:" + pcodec_video->name->tostring());
            // Frames for the decoded picture and the converted YUV420P picture.
            pframe_video = ffmpeg.av_frame_alloc();
            pframeyuv_video = ffmpeg.av_frame_alloc();
            // Allocate the YUV output buffer and bind the frame's data pointers to it.
            out_buffer_size_video = ffmpeg.avpicture_get_size(avpixelformat.av_pix_fmt_yuv420p, pcodecctx_video->width, pcodecctx_video->height);
            out_buffer_video = (byte*)ffmpeg.av_malloc((ulong)out_buffer_size_video);
            ffmpeg.avpicture_fill((avpicture*)pframeyuv_video, out_buffer_video, avpixelformat.av_pix_fmt_yuv420p, pcodecctx_video->width, pcodecctx_video->height);
            // Conversion/scaling context (source format assumed yuv420p here,
            // matching the original; the decoder's pix_fmt would be more general).
            sws_ctx_video = ffmpeg.sws_getcontext(pcodecctx_video->width, pcodecctx_video->height, avpixelformat.av_pix_fmt_yuv420p /*pcodecctx->pix_fmt*/, pcodecctx_video->width, pcodecctx_video->height, avpixelformat.av_pix_fmt_yuv420p, ffmpeg.sws_bicubic, null, null, null);
        }
        #endregion
        #region audio initialization
        if (audioindex > -1)
        {
            // Decoder context of the audio stream.
            pcodectx_audio = ofmt_ctx->streams[audioindex]->codec;
            pcodec_audio = ffmpeg.avcodec_find_decoder(pcodectx_audio->codec_id);
            if (pcodec_audio == null)
            {
                console.writeline("没有找到编码器");
                return -1;
            }
            if (ffmpeg.avcodec_open2(pcodectx_audio, pcodec_audio, null) < 0)
            {
                console.writeline("编码器无法打开");
                return -1;
            }
            console.writeline("find a audio stream. channel=" + audioindex);
            frame_audio = ffmpeg.av_frame_alloc();
            // Resampler: normalize decoder output to 16-bit mono PCM @ 44100 Hz.
            swrctx_audio = ffmpeg.swr_alloc();
            avsampleformat in_sample_fmt = pcodectx_audio->sample_fmt; // input sample format
            out_sample_fmt = avsampleformat.av_sample_fmt_s16;         // 16-bit PCM out
            int in_sample_rate = pcodectx_audio->sample_rate;          // input sample rate
            int out_sample_rate = 44100;                               // output sample rate
            long in_ch_layout = (long)pcodectx_audio->channel_layout;  // input channel layout
            int out_ch_layout = ffmpeg.av_ch_layout_mono;              // output: mono
            ffmpeg.swr_alloc_set_opts(swrctx_audio, out_ch_layout, out_sample_fmt, out_sample_rate, in_ch_layout, in_sample_fmt, in_sample_rate, 0, null);
            ffmpeg.swr_init(swrctx_audio);
            out_channel_nb = ffmpeg.av_get_channel_layout_nb_channels((ulong)out_ch_layout);
            // PCM output buffer.
            out_buffer_audio = (byte*)ffmpeg.av_malloc(2 * 44100);
        }
        #endregion
        // Packet shared by both streams.
        packet = (avpacket*)ffmpeg.av_malloc((ulong)sizeof(avpacket));
        // Keep the SDL playback objects.
        this.sdlvideo = sdlvideo;
        this.sdlaudio = sdlaudio;
        isinit = true;
        return 0;
    }
    /// <summary>
    /// Reads the stream packet by packet, decoding video to YUV and audio to
    /// PCM and handing both to SDL. Blocks until EOF or stop().
    /// </summary>
    /// <returns>0 on success, -1 on a decode error</returns>
    public unsafe int readandplay()
    {
        isrun = true;
        exit_thread = false;
        pause_thread = false;
        thread = thread.currentthread;
        int got_frame, ret;
        byte* out_audio_buffer = out_buffer_audio;
        try
        {
            while (ffmpeg.av_read_frame(ofmt_ctx, packet) >= 0)
            {
                if (exit_thread)
                {
                    break;
                }
                // Busy-wait while paused.
                while (pause_thread)
                {
                    thread.sleep(100);
                }
                #region video: h264 -> yuv -> sdl
                if (packet->stream_index == videoindex)
                {
                    ret = ffmpeg.avcodec_decode_video2(pcodecctx_video, pframe_video, &got_frame, packet);
                    if (ret < 0)
                    {
                        console.writeline("视频解码错误");
                        return -1;
                    }
                    if (got_frame > 0)
                    {
                        video_frame_count++;
                        console.writeline("视频帧数:第 " + video_frame_count + " 帧");
                        // Convert the decoded frame to YUV420P.
                        ffmpeg.sws_scale(sws_ctx_video, pframe_video->data, pframe_video->linesize, 0, pcodecctx_video->height, pframeyuv_video->data, pframeyuv_video->linesize);
                        console.writeline("视频: pts= " + packet->pts + " dts=" + packet->dts);
                        // Either the bound buffer or pframeyuv_video->data[0] works here.
                        sdlvideo.sdl_display(pcodecctx_video->width, pcodecctx_video->height, (intptr)out_buffer_video, out_buffer_size_video, pframeyuv_video->linesize[0]);
                        //deleytoplay_video(packet->pts);
                    }
                }
                #endregion
                #region audio: aac -> pcm -> sdl
                if (packet->stream_index == audioindex)
                {
                    ret = ffmpeg.avcodec_decode_audio4(pcodectx_audio, frame_audio, &got_frame, packet);
                    if (ret < 0)
                    {
                        console.writeline("音频解码失败");
                        return -1;
                    }
                    if (got_frame > 0)
                    {
                        audio_frame_count++;
                        console.writeline("音频帧数:第 " + audio_frame_count + " 帧");
                        // Resample to 16-bit mono PCM.
                        ffmpeg.swr_convert(swrctx_audio, &out_audio_buffer, 2 * 44100, (byte**)&frame_audio->data, frame_audio->nb_samples);
                        out_buffer_size_audio = ffmpeg.av_samples_get_buffer_size(null, out_channel_nb, frame_audio->nb_samples, out_sample_fmt, 1);
                        console.writeline("音频: pts= " + packet->pts + " dts=" + packet->dts);
                        sdlaudio.playaudio((intptr)out_audio_buffer, out_buffer_size_audio);
                        //deleytoplay_audio(packet->pts);
                    }
                }
                #endregion
                // Crude pacing of playback; a real player would sync on pts.
                thread.sleep(20);
                ffmpeg.av_free_packet(packet);
            }
        }
        catch (exception ex)
        {
            console.writeline(ex);
        }
        finally
        {
            // ofmt_ctx is intentionally kept open: it is owned by init()
            // and may be reused; closing it here would break restarts.
            // fix: keep isrun accurate even on early 'return -1' exits.
            isrun = false;
        }
        return 0;
    }
    /// <summary>
    /// Starts the playback worker thread. init() must have been called first.
    /// </summary>
    public void start()
    {
        if (!isinit)
        {
            messagebox.show("没有初始化");
            // fix: the original fell through and started the thread anyway,
            // which then dereferenced uninitialized decoder state.
            return;
        }
        thread = new thread(() =>
        {
            try
            {
                readandplay();
            }
            catch (exception ex)
            {
                sq.base.errorlog.writelog4ex("jt1078codecformp4.run video", ex);
            }
        });
        thread.isbackground = true;
        thread.start();
    }
    /// <summary>
    /// Resumes after pause().
    /// </summary>
    public void goonplay()
    {
        pause_thread = false;
        sdlvideo.playvideo();
        sdlaudio.playaudio();
    }
    /// <summary>
    /// Pauses playback.
    /// </summary>
    public void pause()
    {
        pause_thread = true;
        sdlvideo.pausevideo();
        sdlaudio.pauseaudio();
    }
    /// <summary>
    /// Asks the decode loop to exit.
    /// </summary>
    public void stop()
    {
        exit_thread = true;
    }
    long lastpts_video = 0;
    datetime lastts_video;
    long lastpts_audio = 0;
    datetime lastts_audio;
    // Sleeps so that consecutive video frames are presented no faster than
    // their pts difference (pts assumed to be in milliseconds here).
    private void deleytoplay_video(long pts)
    {
        // fix: the original also tested 'lastts_video != null', which is always
        // true for the non-nullable datetime value type.
        if (lastpts_video > 0)
        {
            double delay = (datetime.now - lastts_video).totalmilliseconds;
            var i = (int)(pts - lastpts_video - delay);
            if (i >= 1)
            {
                thread.sleep(i);
            }
        }
        lastts_video = datetime.now;
        lastpts_video = pts;
    }
    // Same pacing logic as deleytoplay_video, for the audio stream.
    private void deleytoplay_audio(long pts)
    {
        if (lastpts_audio > 0)
        {
            double delay = (datetime.now - lastts_audio).totalmilliseconds;
            var i = (int)(pts - lastpts_audio - delay);
            if (i >= 1)
            {
                thread.sleep(i);
            }
        }
        lastts_audio = datetime.now;
        lastpts_audio = pts;
    }
    // Reference for proper pts-based a/v sync: http://dranger.com/ffmpeg/tutorial05.html
    // (fix: this line used to start with '#', which is not a comment in C#)
    //public struct videostate
    //{
    // public double video_clock; // pts of last decoded frame / predicted pts of next decoded frame
    // public avstream* video_st; // video stream
    //}
    //public unsafe double synchronize_video(videostate* vs, avframe* src_frame, double pts)
    //{
    // double frame_delay;
    // if (pts != 0)
    // {
    //  /* if we have pts, set video clock to it */
    //  vs->video_clock = pts;
    // }
    // else
    // {
    //  /* if we aren't given a pts, set it to the clock */
    //  pts = vs->video_clock;
    // }
    // /* update the video clock */
    // frame_delay = av_q2d(vs->video_st->codec->time_base);
    // /* if we are repeating a frame, adjust clock accordingly */
    // frame_delay += src_frame->repeat_pict * (frame_delay * 0.5);
    // vs->video_clock += frame_delay;
    // return pts;
    //}
}

解决音视频同步问题的版本

using cv.media.utils.filter;
using cv.video.base;
using cv.video.base.ffmpeg;
using ffmpeg.autogen;
using jx;
using system;
using system.collections.generic;
using system.linq;
using system.runtime.interopservices;
using system.text;
using system.threading;
using system.threading.tasks;
using system.windows.forms;
using static cvnetvideo.ucvideo;
namespace cvnetvideo.codec.video
{ 
/// <summary>
/// mp4播放(音视频使用同一个线程)
/// </summary>
public unsafe class jt1078codectoplaymp4
{ 
/// <summary>
/// 指示当前解码是否在运行
/// </summary>
public bool isrun { get; protected set; }
/// <summary>
/// 指示当前解码是否在暂停
/// </summary>
public bool ispause { get; protected set; }
/// <summary>
/// 当前线程
/// </summary>
public thread thread;
/// <summary>
/// 退出控制
/// </summary>
private bool exit_thread = false;
/// <summary>
/// 暂停控制
/// </summary>
private bool pause_thread = false;
/// <summary>
/// 视频输出流videoindex
/// </summary>
private int videoindex = -1;
/// <summary>
/// 音频输出流audioindex
/// </summary>
private int audioindex = -1;
/// <summary>
/// 是否初始化
/// </summary>
private bool isinit = false; 
int error;
avformatcontext* ofmt_ctx = null;
avpacket* packet;
avcodeccontext* pcodecctx_video;
avcodec* pcodec_video;
avframe* pframe_video;
avframe* pframeyuv_video;
swscontext* sws_ctx_video;
sdlhelper sdlvideo;
sdlaudio sdlaudio;
int out_buffer_size_video;
byte* out_buffer_video;
int video_frame_count, audio_frame_count; 
avcodeccontext* pcodectx_audio;
avcodec* pcodec_audio;
avframe* frame_audio;
swrcontext* swrctx_audio;
byte* out_buffer_audio;
int out_buffer_size_audio;
int out_channel_nb;
avsampleformat out_sample_fmt;
int contrast;// 对比度
int brightness;// 亮度
int contrast_last;// 对比度
int brightness_last;// 亮度
//对比度亮度
private videofiltering m_video_filtering = new videofiltering();
/// <summary>
/// 设置图像对比度和亮度
/// </summary>
/// <param name="contrast"></param>
/// <param name="brightness"></param>
/// <returns></returns>
public void setcontrastandbrightness(int contrast, int brightness)
{
this.contrast = contrast;
this.brightness = brightness;
}
/// <summary>
/// yuv宽度
/// </summary>
public int yuvwidth { get; set; }
/// <summary>
/// yuv高度
/// </summary>
public int yuvheight { get; set; }
/// <summary>
/// 记录上一帧数据
/// </summary>
list<avvideo> list = new list<avvideo>();
/// <summary>
/// 初始化
/// </summary>
/// <param name="filename"></param>
/// <param name="sdlvideo"></param>
/// <param name="sdlaudio"></param>
/// <returns></returns>
public int init(string filename, sdlhelper sdlvideo, sdlaudio sdlaudio)
{
avformatcontext* ofmt_ctx;
// 注册编解码器
ffmpeg.avcodec_register_all();
// 获取文件信息上下文初始化
ofmt_ctx = ffmpeg.avformat_alloc_context();
this.ofmt_ctx = ofmt_ctx;
// 打开媒体文件
error = ffmpeg.avformat_open_input(&ofmt_ctx, filename, null, null);
if (error != 0)
{
throw new applicationexception(ffmpegbinarieshelper.geterrormessage(error));
}
// 获取流的通道
for (int i = 0; i < ofmt_ctx->nb_streams; i++)
{
if (ofmt_ctx->streams[i]->codec->codec_type == avmediatype.avmedia_type_video)
{
videoindex = i;
console.writeline("video.............." + videoindex);
}
if (ofmt_ctx->streams[i]->codec->codec_type == avmediatype.avmedia_type_audio)
{
audioindex = i;
console.writeline("audio.............." + audioindex);
}
}
if (videoindex == -1)
{
console.writeline("couldn't find a video stream.(没有找到视频流)");
return -1;
}
if (audioindex == -1)
{
console.writeline("couldn't find a audio stream.(没有找到音频流)");
return -1;
}
#region 初始化视频
// 视频流处理
if (videoindex > -1)
{
//获取视频流中的编解码上下文
pcodecctx_video = ofmt_ctx->streams[videoindex]->codec;
//根据编解码上下文中的编码id查找对应的解码
pcodec_video = ffmpeg.avcodec_find_decoder(pcodecctx_video->codec_id);
if (pcodec_video == null)
{
console.writeline("没有找到编码器");
return -1;
}
//打开编码器
if (ffmpeg.avcodec_open2(pcodecctx_video, pcodec_video, null) < 0)
{
console.writeline("编码器无法打开");
return -1;
}
console.writeline("find a video stream.channel=" + videoindex);
//输出视频信息
var format = ofmt_ctx->iformat->name->tostring();
var len = (ofmt_ctx->duration) / 1000000;
var width = pcodecctx_video->width;
var height = pcodecctx_video->height;
console.writeline("video format:" + format);
console.writeline("video length:" + len);
console.writeline("video width&height:width=" + width + " height=" + height);
console.writeline("video codec name:" + pcodec_video->name->tostring());
//准备读取
//avpacket用于存储一帧一帧的压缩数据(h264)
//avframe用于存储解码后的像素数据(yuv)
//内存分配
pframe_video = ffmpeg.av_frame_alloc();
//yuv420
pframeyuv_video = ffmpeg.av_frame_alloc();
//只有指定了avframe的像素格式、画面大小才能真正分配内存
//缓冲区分配内存
out_buffer_size_video = ffmpeg.avpicture_get_size(avpixelformat.av_pix_fmt_yuv420p, pcodecctx_video->width, pcodecctx_video->height);
out_buffer_video = (byte*)ffmpeg.av_malloc((ulong)out_buffer_size_video);
//初始化缓冲区
ffmpeg.avpicture_fill((avpicture*)pframeyuv_video, out_buffer_video, avpixelformat.av_pix_fmt_yuv420p, pcodecctx_video->width, pcodecctx_video->height);
//用于转码(缩放)的参数,转之前的宽高,转之后的宽高,格式等
sws_ctx_video = ffmpeg.sws_getcontext(pcodecctx_video->width, pcodecctx_video->height, avpixelformat.av_pix_fmt_yuv420p /*pcodecctx->pix_fmt*/, pcodecctx_video->width, pcodecctx_video->height, avpixelformat.av_pix_fmt_yuv420p, ffmpeg.sws_bicubic, null, null, null);
}
#endregion
#region 初始化音频
// 音频流处理
if (audioindex > -1)
{
//根据索引拿到对应的流,根据流拿到解码器上下文
pcodectx_audio = ofmt_ctx->streams[audioindex]->codec;
//再根据上下文拿到编解码id,通过该id拿到解码器
pcodec_audio = ffmpeg.avcodec_find_decoder(pcodectx_audio->codec_id);
if (pcodec_audio == null)
{
console.writeline("没有找到编码器");
return -1;
}
//打开编码器
if (ffmpeg.avcodec_open2(pcodectx_audio, pcodec_audio, null) < 0)
{
console.writeline("编码器无法打开");
return -1;
}
console.writeline("find a audio stream. channel=" + audioindex);
//解压缩数据
frame_audio = ffmpeg.av_frame_alloc();
//frame->16bit 8000 pcm 统一音频采样格式与采样率
swrctx_audio = ffmpeg.swr_alloc();
//重采样设置选项-----------------------------------------------------------start
//输入的采样格式
avsampleformat in_sample_fmt = pcodectx_audio->sample_fmt;
//输出的采样格式 16bit pcm
out_sample_fmt = avsampleformat.av_sample_fmt_s16;
//输入的采样率
int in_sample_rate = pcodectx_audio->sample_rate;
//输出的采样率
int out_sample_rate = 8000;
//输入的声道布局
long in_ch_layout = (long)pcodectx_audio->channel_layout;
//输出的声道布局
int out_ch_layout = ffmpeg.av_ch_layout_mono;
ffmpeg.swr_alloc_set_opts(swrctx_audio, out_ch_layout, out_sample_fmt, out_sample_rate, in_ch_layout, in_sample_fmt, in_sample_rate, 0, null);
ffmpeg.swr_init(swrctx_audio);
//重采样设置选项-----------------------------------------------------------end
//获取输出的声道个数
out_channel_nb = ffmpeg.av_get_channel_layout_nb_channels((ulong)out_ch_layout);
//存储pcm数据
out_buffer_audio = (byte*)ffmpeg.av_malloc(2 * 8000);
}
#endregion
//缓冲区,开辟空间
packet = (avpacket*)ffmpeg.av_malloc((ulong)sizeof(avpacket));
// 设置sdl播放对象
this.sdlvideo = sdlvideo;
this.sdlaudio = sdlaudio; 
isinit = true; 
return 0;
} 
/// <summary>
/// Read packets from the demuxed file and play them: video packets are
/// decoded h264 -> yuv420p and rendered through sdl, audio packets are
/// decoded aac -> pcm, resampled and pushed to the sdl audio device.
/// Runs until the stream ends or exit_thread is set.
/// </summary>
/// <param name="playfinisheddo">callback invoked once the read loop ends (normally or on error)</param>
/// <returns>0 on normal completion, -1 on a decode error</returns>
public unsafe int readandplay(playfinisheddo playfinisheddo)
{
isrun = true;
exit_thread = false;
pause_thread = false;
thread = thread.currentthread;
//int error, frame_count = 0;
int got_frame, ret;
//swscontext* pswsctx = null;
byte* out_audio_buffer = out_buffer_audio;
try
{
avstream* video_stream = ofmt_ctx->streams[videoindex];
while (ffmpeg.av_read_frame(ofmt_ctx, packet) >= 0&& !exit_thread)
{
// spin while paused, or while playlastframe is replaying a buffered frame
while (pause_thread||islastframe)
{
// leave the pause loop if shutdown was requested
if (exit_thread)
{
break;
}
thread.sleep(10);
}
// exit the read loop on shutdown
if (exit_thread)
{
break;
}
// record the first video frame's pts (ms) and the wall-clock start time;
// both are used by deleytoplay to pace rendering
if (firstpts == -1 && packet->stream_index == videoindex)
{
firstpts = packet->pts * 1000 / (video_stream->time_base.den / video_stream->time_base.num);
startts = datetime.now;
}
// only video is paced; audio is played as fast as it is decoded
if (packet->stream_index == videoindex)
{
long pts_1 = packet->pts * 1000 / (video_stream->time_base.den / video_stream->time_base.num);
deleytoplay(pts_1);
}
#region 视频h264转yuv并使用sdl进行播放
if (packet->stream_index == videoindex)
{
// decode one compressed video packet into raw pixel data
ret = ffmpeg.avcodec_decode_video2(pcodecctx_video, pframe_video, &got_frame, packet);
if (ret < 0)
{
console.writeline("视频解码错误");
return -1;
}
// brightness/contrast filtering ----------- start
int width = pcodecctx_video->width;
int height = pcodecctx_video->height;
// rebuild the filter only when the settings actually changed
if (contrast != contrast_last || brightness != brightness_last)
{
m_video_filtering.reset(width, height, contrast, brightness);
contrast_last = contrast;
brightness_last = brightness;
}
// brightness/contrast filtering ----------- end
// a complete frame was produced by the decoder
if (got_frame > 0)
{
video_frame_count++;
// run the decoded frame through the brightness/contrast filter
avframe* frame_filter;
ret = m_video_filtering.filter(pframe_video, &frame_filter);
// convert the filtered avframe to yuv420p at the codec's width/height
ffmpeg.sws_scale(sws_ctx_video, frame_filter->data, frame_filter->linesize, 0, pcodecctx_video->height, pframeyuv_video->data, pframeyuv_video->linesize);
// keep the last 10 frames so playlastframe can step backwards
avvideo videoframe = new avvideo(pcodecctx_video->width, pcodecctx_video->height, (intptr)out_buffer_video, out_buffer_size_video, pframeyuv_video->linesize[0]);
list.add(videoframe);
if (list.count > 10) list.removeat(0);
// hand the yuv buffer to sdl; either display call below works
sdlvideo.sdl_display(pcodecctx_video->width, pcodecctx_video->height,yuvwidth, yuvheight, (intptr)out_buffer_video, out_buffer_size_video, pframeyuv_video->linesize[0]);
//sdlvideo.sdl_display(pcodecctx_video->width, pcodecctx_video->height, (intptr)pframeyuv_video->data[0], out_buffer_size_video, pframeyuv_video->linesize[0]);
// single-step mode: pause again right after showing this frame
if (isnextframe)
{
pause();
isnextframe = false;
}
// release the filtered frame
m_video_filtering.unrefframe();
}
}
#endregion
#region 音频aac转pcm并使用sdl进行播放
if (packet->stream_index == audioindex)
{
// decode avpacket -> avframe
ret = ffmpeg.avcodec_decode_audio4(pcodectx_audio, frame_audio, &got_frame, packet);
if (ret < 0)
{
console.writeline("音频解码失败");
return -1;
}
// a complete audio frame was produced
if (got_frame > 0)
{
audio_frame_count++;
// resample to the configured output format (mono 16-bit pcm)
// NOTE(review): swr_convert's out_count argument is a per-channel sample
// count, not a byte count -- passing 2 * 8000 only works because the
// destination buffer is that large; confirm against the ffmpeg docs
ffmpeg.swr_convert(swrctx_audio, &out_audio_buffer, 2 * 8000, (byte**)&frame_audio->data, frame_audio->nb_samples);
// actual byte size of the converted pcm block
out_buffer_size_audio = ffmpeg.av_samples_get_buffer_size(null, out_channel_nb, frame_audio->nb_samples, out_sample_fmt, 1);
// push the pcm block to the sdl audio device
sdlaudio.playaudio((intptr)out_audio_buffer, out_buffer_size_audio); 
}
}
#endregion
// release the packet's buffers before reading the next one
// (av_free_packet is deprecated in newer ffmpeg; av_packet_unref replaces it)
ffmpeg.av_free_packet(packet);
thread.sleep(10);
} 
}
catch (exception ex)
{
console.writeline(ex);
}
finally
{
// free the demuxer context
ffmpeg.avformat_free_context(ofmt_ctx);
// notify the owner that playback has finished
playfinisheddo.invoke();
}
isrun = false;
ispause = false;
return 0;
}
// true while playlastframe is replaying a buffered frame; the read loop spins while set
bool islastframe = false;
// true when the next decoded video frame should immediately re-pause (single-step forward)
bool isnextframe = false;
// fast-forward mode flag (frame delay divided by play_speed)
bool playfastly = false;
// slow-motion mode flag (frame delay multiplied by play_speed)
bool playslowly = false;
// speed factor used by controlfastorslow; 1 = normal speed
int play_speed = 1;
// pts (ms) of the first video frame seen; -1 until the first frame arrives
long firstpts = -1;
// wall-clock time at which the first video frame was shown
datetime startts;
/// <summary>
/// Pace video playback: compare this frame's pts against the wall-clock time
/// elapsed since the first frame and sleep if the frame is ahead of schedule.
/// The base delay is scaled by controlfastorslow for fast/slow playback.
/// </summary>
/// <param name="pts">presentation timestamp of the current frame, in milliseconds</param>
private void deleytoplay(long pts)
{
 int delaytime = 0;
 try
 {
  // wall-clock milliseconds elapsed since the first frame was shown
  double delay = (datetime.now - startts).totalmilliseconds;
  // how far ahead of schedule this frame is (ms)
  var i = (int)(pts - firstpts - delay);
  // BUG FIX: the original checked `i >= 100` first, which made the
  // `i >= 300` and `i >= 500` branches unreachable. Test the largest
  // threshold first so a bigger lead produces a longer delay.
  if (i >= 500)
  {
   delaytime = 100;
   delaytime = controlfastorslow(delaytime);
  }
  else if (i >= 300)
  {
   delaytime = 60;
   delaytime = controlfastorslow(delaytime);
  }
  else if (i >= 100)
  {
   delaytime = 40;
   delaytime = controlfastorslow(delaytime);
  }
 }
 catch
 {
  console.writeline("counting delay time error ");
 }
 finally
 {
  console.writeline("counting delay time = " + delaytime+ " play_speed="+ play_speed);
  if (delaytime > 0)
   thread.sleep(delaytime);
 }
}
/// <summary>
/// Scale a base frame delay according to the current playback mode:
/// fast playback divides it by the speed factor, slow playback multiplies it.
/// </summary>
/// <param name="delaytime">base delay in milliseconds</param>
/// <returns>the adjusted delay in milliseconds</returns>
private int controlfastorslow(int delaytime)
{
 if (playfastly)
 {
  // fast playback: shrink the per-frame delay
  return delaytime / play_speed;
 }
 if (playslowly)
 {
  // slow playback: stretch the per-frame delay
  return delaytime * play_speed;
 }
 // normal speed: leave the delay untouched
 return delaytime;
}
/// <summary>
/// Start the background decode/playback thread.
/// </summary>
/// <param name="playfinisheddo">callback passed through to readandplay, invoked when playback finishes</param>
public void start(playfinisheddo playfinisheddo)
{
 if (!isinit)
 {
  messagebox.show("没有初始化");
  // BUG FIX: the original fell through and started the thread anyway;
  // readandplay would then run against uninitialized decoder contexts.
  return;
 }
 thread = new thread(() =>
 {
  try
  {
   readandplay(playfinisheddo);
  }
  catch (exception ex)
  {
   sq.base.errorlog.writelog4ex("jt1078codecformp4.run video", ex);
  }
 });
 // background thread so it cannot keep the process alive on exit
 thread.isbackground = true;
 thread.start(); 
}
/// <summary>
/// Resume playback after a pause: clear the pause flags and restart the
/// sdl video and audio outputs.
/// </summary>
public void goonplay()
{
 // re-anchor the timing baseline so deleytoplay does not try to "catch up"
 // for the time spent paused (audio and video would desync otherwise)
 firstpts = -1;
 // clear both pause indicators in one go
 ispause = pause_thread = false;
 // restart the sdl sinks
 sdlvideo.playvideo();
 sdlaudio.playaudio();
}
/// <summary>
/// Pause playback: raise the pause flags and suspend the sdl video and
/// audio outputs. The decode loop spins while pause_thread is set.
/// </summary>
public void pause()
{
 // raise both pause indicators in one go
 ispause = pause_thread = true;
 // suspend the sdl sinks
 sdlvideo.pausevideo();
 sdlaudio.pauseaudio();
}
/// <summary>
/// Stop playback: signal the decode loop to exit cooperatively and wait
/// (bounded) for the worker thread to finish.
/// </summary>
public void stop()
{
 // ask the read loop to exit; it checks this flag every iteration
 exit_thread = true;
 if (thread != null && thread.isalive)
 {
  // BUG FIX: the original called thread.abort() right after setting
  // exit_thread, defeating the cooperative shutdown and skipping the
  // loop's finally cleanup (abort is also unsupported on modern .net).
  // A bounded join lets the loop observe exit_thread and unwind; the
  // thread is a background thread, so a timeout cannot hang the process.
  thread.join(3000);
  thread = null;
 }
}
/// <summary>
/// Fast playback. If slow motion was active it is cancelled and the speed
/// factor resets to 1; otherwise each call bumps the speed factor up by one.
/// </summary>
public void playfast()
{
 // a paused pipeline must be resumed before the speed change takes effect
 if (pause_thread)
 {
  goonplay();
 }
 if (playslowly)
 {
  // leaving slow motion: return to normal speed first
  playslowly = false;
  play_speed = 1;
 }
 else
 {
  play_speed++;
 }
 playfastly = true;
}
/// <summary>
/// Slow playback. If fast-forward was active it is cancelled and the speed
/// factor resets to 1; otherwise each call bumps the speed factor up by one.
/// </summary>
public void playslow()
{
 // a paused pipeline must be resumed before the speed change takes effect
 if (pause_thread)
 {
  goonplay();
 }
 if (playfastly)
 {
  // leaving fast-forward: return to normal speed first
  playfastly = false;
  play_speed = 1;
 }
 else
 {
  play_speed++;
 }
 playslowly = true;
}
/// <summary>
/// Step one frame backwards: re-display the most recent frame kept in the
/// 10-frame history buffer (filled by readandplay), then pause again.
/// </summary>
public void playlastframe()
{
// hold the read loop (it spins while islastframe is set)
islastframe = true;
// each call steps back by one buffered frame
if (list.count>0)
{
console.writeline("剩余播放帧:"+ list.count);
// wake the pipeline so sdl can render
goonplay();
avvideo lastframe = list.last();
// redraw the previous frame from the history buffer
sdlvideo.sdl_display(lastframe.width, lastframe.height, lastframe.pixels, lastframe.pixelssize, lastframe.pitch);
// release the read loop again
islastframe = false;
// drop the frame we just replayed
list.remove(lastframe);
thread.sleep(10);
pause();
}   
}
/// <summary>
/// Step one frame forward: resume playback and set a flag so the read loop
/// pauses again right after the next video frame is displayed.
/// </summary>
public void playnextframe()
{
// pause first so exactly one frame boundary is observed
pause();
// resume so the next frame (and any pending audio) gets played
goonplay();
// readandplay calls pause() again after rendering one frame
isnextframe = true;
}
}
/// <summary>
/// Base type for buffered media items (see avvideo / avaudio below).
/// </summary>
class media
{
 /// <summary>
 /// stream kind: 0 = video, 1 = audio
 /// </summary>
 public int type { get; set; }
 /// <summary>
 /// presentation timestamp of this item
 /// </summary>
 public long pts { get; set; }
}
/// <summary>
/// One decoded video frame kept for backwards stepping: a yuv pixel buffer
/// plus the geometry needed to redraw it with sdl.
/// </summary>
class avvideo : media
{
 // frame width in pixels
 public int width { get; set; }
 // frame height in pixels
 public int height { get; set; }
 // pointer to the yuv pixel buffer
 public intptr pixels { get; set; }
 // size of the pixel buffer in bytes
 public int pixelssize { get; set; }
 // stride (bytes per row) of the first plane
 public int pitch { get; set; }
 public avvideo(int width, int height, intptr pixels, int pixelssize, int pitch)
 {
  // buffer description first, then geometry
  this.pixels = pixels;
  this.pixelssize = pixelssize;
  this.pitch = pitch;
  this.width = width;
  this.height = height;
 }
}
/// <summary>
/// One decoded block of pcm audio: buffer pointer and its length in bytes.
/// </summary>
class avaudio : media
{
 // pointer to the pcm sample buffer
 public intptr pcm { get; set; }
 // buffer length in bytes
 public int len { get; set; }
 public avaudio(intptr pcm, int len)
 {
  this.len = len;
  this.pcm = pcm;
 }
}
}

以上这篇c# 使用sdl2实现mp4文件播放音视频操作就是www.887551.com分享给大家的全部内容了,希望能给大家一个参考,也希望大家多多支持www.887551.com。