MediaElement应用之顺畅播放实时流

  在以前的一篇文章中介绍过《MediaElement播放YUV实时流数据技巧 》,通过重载 MediaStreamSource 来解决视频监控情形下的实时流的播放问题。

  但最后有个问题一直没能解决,当时采用了“取巧”的方式:   

// Original "trick": pause the element, inject the next frame's stream, then
// resume — this forces the pipeline to call GetSampleAsync again, but it
// stutters playback (the problem this article goes on to solve properly).
element.Pause();            
streamSource.AddStream(stream);
element.Play();

  年前又在研究Silverlight这方面的内容,在同事的提点下才知道是自己孤陋寡闻了,算是解决了这一问题。现在对其进行简要的说明:

 

  首先了解一个概念:时间戳——Time Stamp,在音视频领域中主要用于音频与视频的同步。

  以前出现的问题就是没有理解到【时间戳】这一概念及其应用,简单的为了能够进入GetSampleAsync获取 MediaStreamSample而不得不频繁的调用SetSource方法,没能在实际的流处理上下功夫。

  再来看看 MediaStreamSample的构造函数:

public MediaStreamSample(MediaStreamDescription mediaStreamDescription, Stream stream, long offset, long count,long timestamp, 
IDictionary<MediaSampleAttributeKeys, string> attributes);


public MediaStreamSample(MediaStreamDescription mediaStreamDescription, Stream stream, long offset, long count, long timestamp,
long duration, IDictionary<MediaSampleAttributeKeys, string> attributes);

  其中的:

  stream——实例使用到的流数据(不一定包含的是实际播放的数据,可以与offset和count等结合指示播放的数据)

  offset——相对于stream的字节偏移量(取决于stream的数据)

  count——播放实例包含的字节数

  timestamp——实例的时间戳

  duration——实例的间隔时间(可用可不用)

  attributes——描述实例的属性集合

 

  同样来看看实时流解码后的数据:

// Decode callback shape (native SDK): nPort is the decode port, pBuf points at
// the decoded frame bytes of nSize length, pFrameInfo carries frame metadata
// (see NET_POSA_FRAME_INFO). Body intentionally empty — shown only to document
// the signature.
private void OnDecCBFun(int nPort, IntPtr pBuf, int nSize, ref NET_POSA_FRAME_INFO pFrameInfo, int nReserved1, int nReserved2){}

  nPort——解码所用端口

  pBuf——解码后的数据指针

  nSize——解码后数据大小

  其中的【NET_POSA_FRAME_INFO】结构体如下:

// Interop struct mirroring the native SDK's per-frame info. Field order and
// types must match the native layout exactly — do not reorder or retype.
public struct NET_POSA_FRAME_INFO
{
// Frame width in pixels.
public int nWidth;
// Frame height in pixels.
public int nHeight;
// Frame timestamp (absolute; unit discussed in the article text — needs
// conversion before use as a MediaStreamSample timestamp).
public int nStamp;
// Data type of the decoded frame.
public int nType;
// Frame rate.
public int nFrameRate;
}

  nWidth——图像宽度

  nHeight——图像高度

  nStamp——时间戳

  nType——数据类型

  nFrameRate——帧率

 

  值得一提的是:

  MediaStreamSample的时间戳是相对时间戳,而解码数据里的时间戳是绝对时间戳,需要计算。

  MediaStreamSample的时间戳单位是100纳秒,解码数据的时间戳单位为秒(一开始没注意,会导致视频画面播放速度显得很快),需要转换。

   为了方便使用及理清条理,封装了以下几个类:  

RealFile 管理Sample
    /// <summary>
    /// Manages the queue of pending video samples and converts each frame's
    /// absolute stamp into a timestamp relative to the first frame.
    /// </summary>
    public class RealFile
    {
        // Media type of the frames; decides how raw stamps are scaled.
        private readonly RealMediaTypes mediaType;

        // Pending samples in arrival order. Queue<T> dequeues in O(1); the
        // original List<T>.RemoveAt(0) was O(n) per sample.
        private readonly Queue<RealSample> videoSamples = new Queue<RealSample>();

        // Absolute stamp of the first frame; later stamps are made relative to it.
        private long baseTimestamp;
        // Fix: the original used `timestamp == 0` to mean "not yet set", which
        // silently failed to latch a legitimate first stamp of 0.
        private bool baseTimestampSet;

        /// <summary>Creates a sample manager for frames of the given size and type.</summary>
        public RealFile(int width, int height, RealMediaTypes type)
        {
            this.Height = height;
            this.Width = width;
            this.mediaType = type;
        }

        #region Property
        public long Height { get; set; }

        public long Width { get; set; }
        #endregion

        /// <summary>Dequeues the next pending sample, or null when none is queued.</summary>
        public RealSample GetVideoSample()
        {
            if (videoSamples.Count > 0)
                return videoSamples.Dequeue();
            return null;
        }

        /// <summary>
        /// Wraps <paramref name="realStream"/> in a RealSample whose timestamp is
        /// relative to the first frame's stamp, and queues it for playback.
        /// </summary>
        public void AddStream(RealStream realStream, long stamp)
        {
            if (!baseTimestampSet)
            {
                baseTimestamp = stamp;
                baseTimestampSet = true;
            }

            RealSample sample = new RealSample();
            sample.Offset = 0;
            sample.Count = realStream.Length;

            // MediaStreamSample timestamps are in 100-ns units. x10000 converts
            // milliseconds to 100-ns ticks — NOTE(review): the article text says
            // the SDK stamp is in seconds, which would require x10,000,000;
            // confirm the actual unit against the SDK.
            if (mediaType == RealMediaTypes.YV12)
                sample.Timestamp = (stamp - baseTimestamp) * 10000;
            else
                sample.Timestamp = (stamp - baseTimestamp);

            sample.MediaStream = realStream.MediaStream;

            videoSamples.Enqueue(sample);
        }
    }
RealSample Sample对应信息
    /// <summary>
    /// Plain data holder describing one media sample: the backing stream plus
    /// the offset/count/timestamp/duration values handed to MediaStreamSample.
    /// </summary>
    public class RealSample
    {
        /// <summary>Stream containing the sample bytes.</summary>
        public Stream MediaStream { get; set; }

        /// <summary>Byte offset of the sample within <see cref="MediaStream"/>.</summary>
        public long Offset { get; set; }

        /// <summary>Number of bytes the sample occupies.</summary>
        public long Count { get; set; }

        /// <summary>Sample timestamp, in 100-nanosecond units.</summary>
        public long Timestamp { get; set; }

        /// <summary>Optional sample duration.</summary>
        public long Duration { get; set; }
    }
RealStream
    /// <summary>
    /// Thin wrapper over a <see cref="Stream"/>, exposing the members the
    /// sample manager needs; virtual so subclasses can override the behavior.
    /// </summary>
    public class RealStream
    {
        /// <summary>Gets or sets the wrapped underlying stream.</summary>
        public Stream MediaStream { get; set; }

        /// <summary>Wraps the supplied stream.</summary>
        public RealStream(Stream stream)
        {
            this.MediaStream = stream;
        }

        /// <summary>Closes the underlying stream.</summary>
        public virtual void Close()
        {
            this.MediaStream.Close();
        }

        #region Property
        /// <summary>Length of the underlying stream, in bytes.</summary>
        public virtual long Length
        {
            get { return this.MediaStream.Length; }
        }

        /// <summary>Current position of the underlying stream.</summary>
        public virtual long Position
        {
            get { return this.MediaStream.Position; }
        }
        #endregion
    }


  则RealMediaStreamSource的代码如下:

RealMediaStreamSource
    // Kind of video data being fed to the MediaStreamSource.
    public enum RealMediaTypes
{
// Compressed stream — reported to the pipeline with FourCC "H264".
MBF,
// Raw planar YUV frames — reported with FourCC "YV12".
YV12,
}

// Custom MediaStreamSource that feeds live video frames to a MediaElement.
// New frames are pushed in via AddStream; the pipeline pulls them back out
// through GetSampleAsync. Tightly coupled to Silverlight's async media
// protocol (OpenMediaAsync / ReportOpenMediaCompleted / GetSampleAsync /
// ReportGetSampleCompleted) — verify any change against a running pipeline.
public class RealMediaStreamSource : MediaStreamSource
{
// Media type of the incoming frames (MBF -> FourCC "H264", YV12 -> "YV12").
RealMediaTypes _mediaType;
// Most recently supplied frame stream; also reused as the fallback sample
// source when the queue is empty (see GetVideoSample).
Stream _mediaStream;
// Absolute stamp of the first frame, used to seed the relative baseline.
long _stamp;
int _width;
int _height;

// Sample manager: queues one RealSample per frame pushed via AddStream.
RealFile realFile;

Dictionary<MediaSampleAttributeKeys, string> _mediaSampleAttributes = new Dictionary<MediaSampleAttributeKeys, string>();
Dictionary<MediaStreamAttributeKeys, string> _mediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
Dictionary<MediaSourceAttributesKeys, string> _mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
List<MediaStreamDescription> _mediaStreamDescriptions = new List<MediaStreamDescription>();

// NOTE(review): incremented in OpenMediaAsync/AddStream and decremented in
// GetVideoSample, but never read — appears to be dead state.
private int index;

// Captures the frame parameters; the pipeline later calls OpenMediaAsync.
public RealMediaStreamSource(RealMediaTypes type, Stream stream, int width, int height, long stamp)
{
this._mediaType = type;
this._mediaStream = stream;
this._width = width;
this._height = height;
this._stamp = stamp;
}

#region Override
// Called by the pipeline after SetSource; seeds the sample queue with the
// first frame and reports the stream/source metadata.
protected override void OpenMediaAsync()
{
index = 0;
realFile = new RealFile(_width, _height, _mediaType);
RealStream stream = new RealStream(_mediaStream);
realFile.AddStream(stream, _stamp);

// Describe the media sample (frame dimensions).
_mediaSampleAttributes[MediaSampleAttributeKeys.FrameHeight] = _height.ToString();
_mediaSampleAttributes[MediaSampleAttributeKeys.FrameWidth] = _width.ToString();

// Describe the media stream (dimensions, codec FourCC).
_mediaStreamAttributes[MediaStreamAttributeKeys.Height] = _height.ToString();
_mediaStreamAttributes[MediaStreamAttributeKeys.Width] = _width.ToString();
_mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = "";
if (_mediaType == RealMediaTypes.MBF)
_mediaStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "H264";
else if (_mediaType == RealMediaTypes.YV12)
_mediaStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "YV12";

// Full description of the (single, video-only) media stream.
_mediaStreamDescriptions.Add(new MediaStreamDescription(MediaStreamType.Video, _mediaStreamAttributes));

// Describe the media source. Duration is left empty for a live stream.
_mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "true";
_mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = "";

index++;
this.ReportOpenMediaCompleted(_mediaSourceAttributes, _mediaStreamDescriptions);
}

// Pipeline pull: hand back the next video sample.
// NOTE(review): if mediaSample were ever null, ReportGetSampleCompleted would
// not be called and the pipeline would stall; as written GetVideoSample()
// always returns a sample, so the null branch is unreachable.
protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
MediaStreamSample mediaSample = null;
if (mediaStreamType == MediaStreamType.Video)
mediaSample = GetVideoSample();
if (mediaSample != null)
ReportGetSampleCompleted(mediaSample);
}

// Builds a MediaStreamSample from the next queued RealSample. When the queue
// is empty, the most recent raw stream is replayed with timestamp 0.
// NOTE(review): a new MediaStreamDescription is created per sample rather than
// reusing the instance reported in OpenMediaAsync — confirm the pipeline
// accepts this; reusing the reported description is the usual pattern.
// NOTE(review): the empty-queue fallback resends data with timestamp 0, which
// may jump playback backwards — confirm this is the intended behavior.
private MediaStreamSample GetVideoSample()
{
RealSample sample = realFile.GetVideoSample();
MediaStreamDescription msd = new MediaStreamDescription(MediaStreamType.Video, _mediaStreamAttributes);
MediaStreamSample mediaSample;
if (sample == null)
{
mediaSample = new MediaStreamSample(
msd,
_mediaStream,
0,
_mediaStream.Length,
0,
_mediaSampleAttributes);
}
else
{
mediaSample = new MediaStreamSample(
msd,
sample.MediaStream,
sample.Offset,
sample.Count,
sample.Timestamp,
_mediaSampleAttributes);
}
index--;
return mediaSample;
}

// Seeking a live stream is a no-op: just acknowledge the requested time.
protected override void SeekAsync(long seekToTime)
{
this.ReportSeekCompleted(seekToTime);
}

// No resources owned here beyond the streams managed by the caller.
protected override void CloseMedia()
{
}

// Single-stream source: stream switching is not supported.
protected override void SwitchMediaStreamAsync(MediaStreamDescription mediaStreamDescription)
{
}

// Diagnostics not reported.
protected override void GetDiagnosticAsync(MediaStreamSourceDiagnosticKind diagnosticKind)
{
}
#endregion

// Entry point for each subsequent live frame: queue it under its stamp so
// GetSampleAsync can deliver it with a correct relative timestamp.
public void AddStream(Stream mediaStream, long stamp)
{
this._mediaStream = mediaStream;
RealStream addstream = new RealStream(mediaStream);
realFile.AddStream(addstream, stamp);
index++;
}
}

  RealMediaStreamSource具体调用如下:

调用
        bool isFirst = true;
// Handles each received video packet: the first packet creates and attaches
// the MediaStreamSource, subsequent packets are appended to it.
void proxy_GetVideosReceived(object sender, GetVideosReceivedEventArgs e)
{
    StreamData sources = e.data;

    // A stamp of 0 cannot seed the relative-timestamp baseline; skip the packet.
    if (sources.Stamp == 0)
        return;

    if (isFirst)
    {
        // First packet: create the source and hand it to the MediaElement.
        // (The original wrapped this in `lock` on a freshly created local
        // object, which synchronizes nothing; the useless lock is removed.)
        ShowStream(sources);
        isFirst = false;
    }
    else
    {
        try
        {
            ShowStreamToo(sources);
        }
        catch (System.Exception)
        {
            // Best-effort: one bad packet must not kill the live stream.
            // NOTE(review): consider logging here instead of swallowing silently.
        }
    }
}

// Creates the MediaStreamSource from the first frame and starts playback.
private void ShowStream(StreamData sources)
{
    // Wrap the frame bytes in an in-memory stream and attach the new source.
    MemoryStream frame = new MemoryStream(sources.Streams);
    streamSource1 = new RealMediaStreamSource(RealMediaTypes.YV12, frame, sources.Width, sources.Height, sources.Stamp);
    element1.SetSource(streamSource1);
}

// Appends a subsequent frame to the already-playing MediaStreamSource.
private void ShowStreamToo(StreamData sources)
{
    MemoryStream frame = new MemoryStream(sources.Streams);
    ((RealMediaStreamSource)streamSource1).AddStream(frame, sources.Stamp);
}


 

  至此便能够顺利的播放视频流啦,归根结底问题在于实例化的MediaStreamSample所包含的信息是否准确

   GetVideoSample()里的处理还需要大家根据自己的实际情况灵活应用哦!

  目前个人也还处于探索学习阶段,欢迎大家多多指教!



posted @ 2012-01-30 10:15  peony007  阅读(6515)  评论(16编辑  收藏  举报