In an earlier article, 《MediaElement播放YUV實時流數據技巧》 (tips for playing real-time YUV stream data with MediaElement), I described overriding MediaStreamSource to play the real-time streams you get in video surveillance.
But one problem remained unsolved at the time, and I resorted to a "trick" to work around it:
element.Pause();
streamSource.AddStream(stream);
element.Play();
Before the new year I was digging into this part of Silverlight again, and it took a colleague's hint for me to realize how ill-informed I had been; the problem is now essentially solved. A brief explanation follows:
First, a concept: the time stamp. In the audio/video world it is understood mainly as the means of synchronizing audio and video.
The earlier problem came from not understanding the timestamp concept and its use: just to get into GetSampleAsync and obtain a MediaStreamSample, I had to keep calling SetSource over and over, instead of putting the effort into the actual stream handling.
Now look at the constructors of MediaStreamSample:
public MediaStreamSample(MediaStreamDescription mediaStreamDescription, Stream stream,
    long offset, long count, long timestamp,
    IDictionary<MediaSampleAttributeKeys, string> attributes);
public MediaStreamSample(MediaStreamDescription mediaStreamDescription, Stream stream,
    long offset, long count, long timestamp, long duration,
    IDictionary<MediaSampleAttributeKeys, string> attributes);
The parameters:
stream — the stream the sample draws from (it need not contain only the data to be played; combined with offset and count it designates the data that will actually play)
offset — the byte offset into stream (depends on how the stream is laid out)
count — the number of bytes the sample covers
timestamp — the sample's timestamp
duration — the sample's duration (optional)
attributes — a collection of attributes describing the sample
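As a minimal sketch of how these parameters fit together (videoDescription, frameBytes and relativeTicks are placeholder names of mine, not from the original code), one decoded frame could be wrapped like this:

Stream frameStream = new MemoryStream(frameBytes);   // one decoded frame
MediaStreamSample sample = new MediaStreamSample(
    videoDescription,      // MediaStreamDescription of the video stream
    frameStream,           // stream holding the frame data
    0,                     // offset: the frame starts at byte 0
    frameStream.Length,    // count: the whole frame
    relativeTicks,         // timestamp in 100-ns units, relative to the first frame
    new Dictionary<MediaSampleAttributeKeys, string>());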
Now look at the data produced by decoding the real-time stream:
private void OnDecCBFun(int nPort, IntPtr pBuf, int nSize, ref NET_POSA_FRAME_INFO pFrameInfo, int nReserved1, int nReserved2){}
nPort — the decode channel (port) being used
pBuf — pointer to the decoded data
nSize — size of the decoded data in bytes
The NET_POSA_FRAME_INFO structure referenced above is:
public struct NET_POSA_FRAME_INFO
{
    public int nWidth;
    public int nHeight;
    public int nStamp;
    public int nType;
    public int nFrameRate;
}
nWidth — image width
nHeight — image height
nStamp — timestamp
nType — data type
nFrameRate — frame rate
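Inside the callback, the unmanaged buffer must be copied into managed memory before it can be handed to a MediaStreamSource. A minimal sketch, assuming a hypothetical FeedFrame helper that routes the frame onward (not part of the original code):

private void OnDecCBFun(int nPort, IntPtr pBuf, int nSize, ref NET_POSA_FRAME_INFO pFrameInfo, int nReserved1, int nReserved2)
{
    // A decoded YV12 frame is nWidth * nHeight * 3 / 2 bytes, which should match nSize.
    byte[] frame = new byte[nSize];
    System.Runtime.InteropServices.Marshal.Copy(pBuf, frame, 0, nSize);
    // Hand the frame and its absolute timestamp (nStamp) onward.
    FeedFrame(frame, pFrameInfo.nStamp);
}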
Two points worth noting:
The timestamp in a MediaStreamSample is relative, while the timestamp in the decoded data is absolute, so a conversion is needed.
A MediaStreamSample timestamp is in units of 100 nanoseconds, while the decoded timestamp is in milliseconds (consistent with the *10000 factor in the code below; I did not notice this at first, which made the video play back visibly too fast), so the units need converting too.
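Concretely, a minimal sketch of the conversion (stampMs and firstStampMs are my names for the current and first absolute stamps, in milliseconds):

// 1 ms = 10,000 ticks of 100 ns
long relativeTicks = (stampMs - firstStampMs) * 10000;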
To make this easier to use and to keep the logic tidy, I wrapped it in the following classes:

public class RealFile
{
    RealMediaTypes mediaType;
    List<RealSample> videoSampleList;
    long timestamp = 0;   // absolute timestamp of the first frame, used as the zero point

    public RealFile(int width, int height, RealMediaTypes type)
    {
        this.Height = height;
        this.Width = width;
        this.mediaType = type;
        videoSampleList = new List<RealSample>();
    }

    #region Property
    public long Height { get; set; }
    public long Width { get; set; }
    #endregion

    // Dequeue the oldest pending sample, or null if nothing is queued.
    public RealSample GetVideoSample()
    {
        if (videoSampleList.Count > 0)
        {
            RealSample sample = videoSampleList[0];
            videoSampleList.RemoveAt(0);
            return sample;
        }
        else
            return null;
    }

    public void AddStream(RealStream realStream, long stamp)
    {
        // Remember the first absolute timestamp so later stamps become relative.
        if (timestamp == 0)
            timestamp = stamp;
        RealSample sample = new RealSample();
        sample.Offset = 0;
        sample.Count = realStream.Length;
        if (mediaType == RealMediaTypes.YV12)
            sample.Timestamp = (stamp - timestamp) * 10000;   // ms -> 100-ns ticks
        else
            sample.Timestamp = (stamp - timestamp);
        sample.MediaStream = realStream.MediaStream;
        videoSampleList.Add(sample);
    }
}

public class RealSample
{
    public long Offset { get; set; }
    public long Count { get; set; }
    public long Timestamp { get; set; }
    public long Duration { get; set; }
    public Stream MediaStream { get; set; }
}

public class RealStream
{
    private Stream mediaStream;
    public Stream MediaStream
    {
        get { return mediaStream; }
        set { mediaStream = value; }
    }

    public RealStream(Stream stream)
    {
        this.mediaStream = stream;
    }

    public virtual void Close()
    {
        this.mediaStream.Close();
    }

    #region Property
    public virtual long Length
    {
        get { return this.mediaStream.Length; }
    }
    public virtual long Position
    {
        get { return this.mediaStream.Position; }
    }
    #endregion
}
RealMediaStreamSource then looks like this:

public enum RealMediaTypes
{
    MBF,
    YV12,
}

public class RealMediaStreamSource : MediaStreamSource
{
    RealMediaTypes _mediaType;
    Stream _mediaStream;
    long _stamp;
    int _width;
    int _height;
    RealFile realFile;
    Dictionary<MediaSampleAttributeKeys, string> _mediaSampleAttributes = new Dictionary<MediaSampleAttributeKeys, string>();
    Dictionary<MediaStreamAttributeKeys, string> _mediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    Dictionary<MediaSourceAttributesKeys, string> _mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    List<MediaStreamDescription> _mediaStreamDescriptions = new List<MediaStreamDescription>();
    private int index;

    public RealMediaStreamSource(RealMediaTypes type, Stream stream, int width, int height, long stamp)
    {
        this._mediaType = type;
        this._mediaStream = stream;
        this._width = width;
        this._height = height;
        this._stamp = stamp;
    }
    #region Override
    protected override void OpenMediaAsync()
    {
        index = 0;
        realFile = new RealFile(_width, _height, _mediaType);
        RealStream stream = new RealStream(_mediaStream);
        realFile.AddStream(stream, _stamp);
        // Describe the media samples
        _mediaSampleAttributes[MediaSampleAttributeKeys.FrameHeight] = _height.ToString();
        _mediaSampleAttributes[MediaSampleAttributeKeys.FrameWidth] = _width.ToString();
        // Describe the media stream
        _mediaStreamAttributes[MediaStreamAttributeKeys.Height] = _height.ToString();
        _mediaStreamAttributes[MediaStreamAttributeKeys.Width] = _width.ToString();
        _mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = "";
        if (_mediaType == RealMediaTypes.MBF)
            _mediaStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "H264";
        else if (_mediaType == RealMediaTypes.YV12)
            _mediaStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "YV12";
        // Full description of the media stream
        _mediaStreamDescriptions.Add(new MediaStreamDescription(MediaStreamType.Video, _mediaStreamAttributes));
        // Describe the media source
        _mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "true";
        _mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = "";
        index++;
        this.ReportOpenMediaCompleted(_mediaSourceAttributes, _mediaStreamDescriptions);
    }
    protected override void GetSampleAsync(MediaStreamType mediaStreamType)
    {
        MediaStreamSample mediaSample = null;
        if (mediaStreamType == MediaStreamType.Video)
            mediaSample = GetVideoSample();
        if (mediaSample != null)
            ReportGetSampleCompleted(mediaSample);
    }

    private MediaStreamSample GetVideoSample()
    {
        RealSample sample = realFile.GetVideoSample();
        MediaStreamDescription msd = new MediaStreamDescription(MediaStreamType.Video, _mediaStreamAttributes);
        MediaStreamSample mediaSample;
        if (sample == null)
        {
            // Nothing queued yet: fall back to replaying the current stream
            // with timestamp 0 (adapt this branch to your own feed).
            mediaSample = new MediaStreamSample(
                msd,
                _mediaStream,
                0,
                _mediaStream.Length,
                0,
                _mediaSampleAttributes);
        }
        else
        {
            mediaSample = new MediaStreamSample(
                msd,
                sample.MediaStream,
                sample.Offset,
                sample.Count,
                sample.Timestamp,
                _mediaSampleAttributes);
        }
        index--;
        return mediaSample;
    }
    protected override void SeekAsync(long seekToTime)
    {
        this.ReportSeekCompleted(seekToTime);
    }

    protected override void CloseMedia()
    {
    }

    protected override void SwitchMediaStreamAsync(MediaStreamDescription mediaStreamDescription)
    {
    }

    protected override void GetDiagnosticAsync(MediaStreamSourceDiagnosticKind diagnosticKind)
    {
    }
    #endregion

    public void AddStream(Stream mediaStream, long stamp)
    {
        this._mediaStream = mediaStream;
        RealStream addstream = new RealStream(mediaStream);
        realFile.AddStream(addstream, stamp);
        index++;
    }
}
RealMediaStreamSource is then used like this:

bool isFirst = true;
readonly object syncRoot = new object();   // shared lock object (locking a fresh local object, as the original code did, synchronizes nothing)

void proxy_GetVideosReceived(object sender, GetVideosReceivedEventArgs e)
{
    StreamData sources = e.data;
    if (sources.Stamp == 0)
        return;
    if (isFirst)
    {
        lock (syncRoot)
        {
            ShowStream(sources);
            isFirst = false;
        }
    }
    else
    {
        try
        {
            ShowStreamToo(sources);
        }
        catch (System.Exception)
        {
            // Exceptions from feeding frames are deliberately swallowed here.
        }
    }
}

private void ShowStream(StreamData sources)
{
    Stream stream = new MemoryStream(sources.Streams);
    streamSource1 = new RealMediaStreamSource(RealMediaTypes.YV12, stream, sources.Width, sources.Height, sources.Stamp);
    element1.SetSource(streamSource1);
}

private void ShowStreamToo(StreamData sources)
{
    Stream stream = new MemoryStream(sources.Streams);
    ((RealMediaStreamSource)streamSource1).AddStream(stream, sources.Stamp);
}
With that, the video stream plays smoothly. In the end it all comes down to whether the MediaStreamSample instances you build carry accurate information!
The handling inside GetVideoSample() still needs to be adapted flexibly to your own situation.
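For example, when nothing is queued yet on a live feed, instead of replaying the current stream you could report buffering progress. A sketch using MediaStreamSource.ReportGetSampleProgress; whether it fits depends on your feed:

// Inside GetSampleAsync, when no sample is available yet:
// report buffering instead of fabricating a sample. (A sample built
// with a null stream would signal end-of-stream, which a live feed
// does not want.)
this.ReportGetSampleProgress(0.5);   // buffering progress, 0.0 to 1.0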
I am still at the exploring-and-learning stage myself; comments and pointers are very welcome!