Technical Background
RTSP/RTMP playback, RTMP push, and even the lightweight RTSP service on the Unity platform have all been covered before, so they will not be repeated here. Today we look at a question raised by a developer: in Unity, how can you play an RTSP stream and, at any moment, relay it out as an RTMP push?
RTSP-to-RTMP relay has long been available in native environments; here it is essentially a matter of moving the native workflow into Unity. The relevant steps are shown below.
Technical Implementation
This article takes the Windows platform as an example: on top of the RTSP playback module, we add an RTSP-to-RTMP push module. Without further ado, here is the code:
Start and stop live playback
/*
 * SmartPlayerWinMono.cs
 *
 * Author: daniusdk.com
 * Created on 2017/04/19.
 */
public void StartPlayer(int sel)
{
    Debug.Log("StartPlayer++, sel: " + sel);

    if (videoctrl[sel].is_playing_)
    {
        Debug.Log("StartPlayer, already started.. sel: " + sel);
        return;
    }

    lock (videoctrl[sel].frame_lock_)
    {
        videoctrl[sel].cur_video_frame_ = null;
    }

    // Only open a new player instance if recording/pulling has not opened one already
    if (!videoctrl[sel].is_recording_ && !videoctrl[sel].is_pulling_)
    {
        if (!OpenPlayerHandle(sel))
        {
            Debug.LogError("call OpenPlayerHandle failed, sel:" + sel);
            return;
        }
    }

    // Enable or disable H.264/H.265 hardware decoding according to device support
    if (is_enable_hardware_decoder_)
    {
        NTSmartPlayerSDK.NT_SP_SetH264HardwareDecoder(videoctrl[sel].player_handle_, is_support_h264_hardware_decoder_ ? 1 : 0, 0);
        NTSmartPlayerSDK.NT_SP_SetH265HardwareDecoder(videoctrl[sel].player_handle_, is_support_h265_hardware_decoder_ ? 1 : 0, 0);
    }
    else
    {
        NTSmartPlayerSDK.NT_SP_SetH264HardwareDecoder(videoctrl[sel].player_handle_, 0, 0);
        NTSmartPlayerSDK.NT_SP_SetH265HardwareDecoder(videoctrl[sel].player_handle_, 0, 0);
    }

    // Video frame callback (YUV/RGB); request I420 frames from the SDK
    videoctrl[sel].sdk_video_frame_call_back_ = new VideoControl.SetVideoFrameCallBack(SDKVideoFrameCallBack);
    videoctrl[sel].video_frame_call_back_ = new SP_SDKVideoFrameCallBack(NT_SP_SetVideoFrameCallBack);
    NTSmartPlayerSDK.NT_SP_SetVideoFrameCallBack(videoctrl[sel].player_handle_, (Int32)NT.NTSmartPlayerDefine.NT_SP_E_VIDEO_FRAME_FORMAT.NT_SP_E_VIDEO_FRAME_FROMAT_I420, window_handle_, videoctrl[sel].video_frame_call_back_);

    UInt32 flag = NTSmartPlayerSDK.NT_SP_StartPlay(videoctrl[sel].player_handle_);

    if (flag == DANIULIVE_RETURN_OK)
    {
        videoctrl[sel].is_need_get_frame_ = true;
        Debug.Log("NT_SP_StartPlay succeed, sel:" + sel);
    }
    else
    {
        videoctrl[sel].is_need_get_frame_ = false;
        Debug.LogError("NT_SP_StartPlay failed, sel:" + sel);
    }

    videoctrl[sel].is_playing_ = true;
}
private void StopPlayer(int sel)
{
    Debug.Log("StopPlayer++, sel: " + sel);

    videoctrl[sel].is_need_get_frame_ = false;
    videoctrl[sel].is_need_init_texture_ = false;

    if (videoctrl[sel].player_handle_ == IntPtr.Zero)
    {
        return;
    }

    UInt32 flag = NTSmartPlayerSDK.NT_SP_StopPlay(videoctrl[sel].player_handle_);
    if (flag == DANIULIVE_RETURN_OK)
    {
        Debug.Log("call NT_SP_StopPlay succeed, sel: " + sel);
    }
    else
    {
        Debug.LogError("call NT_SP_StopPlay failed, sel: " + sel);
    }

    // Close the player instance only if neither recording nor pulling still needs it
    if (!videoctrl[sel].is_recording_ && !videoctrl[sel].is_pulling_)
    {
        NTSmartPlayerSDK.NT_SP_Close(videoctrl[sel].player_handle_);
        videoctrl[sel].player_handle_ = IntPtr.Zero;
    }

    videoctrl[sel].is_playing_ = false;
}
To relay the stream out as RTMP, the pulling side first needs to call the stream-pulling interface:
Start and stop stream pulling
public void StartPull(int sel)
{
    if (videoctrl[sel].is_pulling_)
    {
        Debug.Log("StartPull, already started.. sel: " + sel);
        return;
    }

    // Reuse the player instance if playback or recording has already opened it
    if (!videoctrl[sel].is_playing_ && !videoctrl[sel].is_recording_)
    {
        if (!OpenPlayerHandle(sel))
        {
            Debug.LogError("call OpenPlayerHandle failed, sel:" + sel);
            return;
        }
    }

    // Register the callbacks that hand back the encoded audio/video data
    videoctrl[sel].pull_stream_video_data_call_back_ = new SP_SDKPullStreamVideoDataCallBack(OnVideoDataHandle);
    videoctrl[sel].pull_stream_audio_data_call_back_ = new SP_SDKPullStreamAudioDataCallBack(OnAudioDataHandle);

    NTSmartPlayerSDK.NT_SP_SetPullStreamVideoDataCallBack(videoctrl[sel].player_handle_, IntPtr.Zero, videoctrl[sel].pull_stream_video_data_call_back_);
    NTSmartPlayerSDK.NT_SP_SetPullStreamAudioDataCallBack(videoctrl[sel].player_handle_, IntPtr.Zero, videoctrl[sel].pull_stream_audio_data_call_back_);

    int is_transcode_aac = 1; // transcode PCMA/PCMU/Speex audio to AAC before forwarding
    NTSmartPlayerSDK.NT_SP_SetPullStreamAudioTranscodeAAC(videoctrl[sel].player_handle_, is_transcode_aac);

    UInt32 ret = NTSmartPlayerSDK.NT_SP_StartPullStream(videoctrl[sel].player_handle_);
    if (NTBaseCodeDefine.NT_ERC_OK != ret)
    {
        // Starting the pull failed; close the instance if nothing else is using it
        if (!videoctrl[sel].is_playing_ && !videoctrl[sel].is_recording_)
        {
            NTSmartPlayerSDK.NT_SP_Close(videoctrl[sel].player_handle_);
            videoctrl[sel].player_handle_ = IntPtr.Zero;
        }

        return;
    }

    videoctrl[sel].is_pulling_ = true;
}
public void StopPull(int sel)
{
    if (!videoctrl[sel].is_pulling_)
        return;

    NTSmartPlayerSDK.NT_SP_StopPullStream(videoctrl[sel].player_handle_);

    if (!videoctrl[sel].is_playing_ && !videoctrl[sel].is_recording_)
    {
        NTSmartPlayerSDK.NT_SP_Close(videoctrl[sel].player_handle_);
        videoctrl[sel].player_handle_ = IntPtr.Zero;
    }

    videoctrl[sel].is_pulling_ = false;
}
When configuring the pull, note that audio in other formats such as PCMA or PCMU can, for the sake of compatibility, be transcoded to AAC before being delivered in the callback. Also, when pulling or playing, check whether the RTSP URL has already been opened, so that the same stream is handled by a single instance instead of two instances consuming extra resources.
Likewise, when stopping playback or stopping the pull, check whether the other operation is still active: the player instance can only be closed once neither playback nor pulling is running.
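The shared-instance rule above is exactly what OpenPlayerHandle(), referenced in the code but not shown in this article, has to implement. The following is only a hypothetical sketch of what such a helper might look like: the NT_SP_Open()/NT_SP_SetURL() calls, their signatures, and the video_url_ field are assumptions and should be checked against the actual NTSmartPlayerSDK wrapper.
// Hypothetical sketch (not the vendor implementation): open the player instance
// once and let playback, pulling and recording share it.
private bool OpenPlayerHandle(int sel)
{
    // Already opened by another feature (play/pull/record) -> reuse it
    if (videoctrl[sel].player_handle_ != IntPtr.Zero)
        return true;

    // Assumed SDK calls; verify the real signatures in the NTSmartPlayerSDK wrapper
    if (NTSmartPlayerSDK.NT_SP_Open(out videoctrl[sel].player_handle_, IntPtr.Zero) != DANIULIVE_RETURN_OK)
    {
        videoctrl[sel].player_handle_ = IntPtr.Zero;
        return false;
    }

    // video_url_ is a placeholder for wherever the RTSP URL is stored
    if (NTSmartPlayerSDK.NT_SP_SetURL(videoctrl[sel].player_handle_, videoctrl[sel].video_url_) != DANIULIVE_RETURN_OK)
    {
        NTSmartPlayerSDK.NT_SP_Close(videoctrl[sel].player_handle_);
        videoctrl[sel].player_handle_ = IntPtr.Zero;
        return false;
    }

    return true;
}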
Start and stop the RTMP relay push:
public bool StartPush(int sel, String url)
{
    if (videoctrl[sel].is_pushing_)
        return false;

    if (String.IsNullOrEmpty(url))
        return false;

    if (!OpenPushHandle(sel))
        return false;

    IntPtr push_handle = GetPushHandle(sel);
    if (push_handle == IntPtr.Zero)
        return false;

    // Set the RTMP URL the encoded data will be relayed to
    if (NTBaseCodeDefine.NT_ERC_OK != NTSmartPublisherSDK.NT_PB_SetURL(push_handle, url, IntPtr.Zero))
    {
        NTSmartPublisherSDK.NT_PB_Close(push_handle);
        SetPushHandle(sel, IntPtr.Zero);
        return false;
    }

    if (NTBaseCodeDefine.NT_ERC_OK != NTSmartPublisherSDK.NT_PB_StartPublisher(push_handle, IntPtr.Zero))
    {
        NTSmartPublisherSDK.NT_PB_Close(push_handle);
        SetPushHandle(sel, IntPtr.Zero);
        return false;
    }

    videoctrl[sel].is_pushing_ = true;
    return true;
}
public void StopPush(int sel)
{
    if (!videoctrl[sel].is_pushing_)
        return;

    videoctrl[sel].is_pushing_ = false;

    lock (videoctrl[sel].push_handle_mutex_)
    {
        if (videoctrl[sel].push_handle_ == IntPtr.Zero)
            return;

        NTSmartPublisherSDK.NT_PB_StopPublisher(videoctrl[sel].push_handle_);
        NTSmartPublisherSDK.NT_PB_Close(videoctrl[sel].push_handle_);
        videoctrl[sel].push_handle_ = IntPtr.Zero;
    }
}
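StartPush() above also depends on OpenPushHandle(), GetPushHandle() and SetPushHandle(), none of which are shown in this article. OpenPushHandle() is where the publisher instance would be created and configured for relaying already-encoded data; the two accessors are most plausibly thin thread-safe wrappers around push_handle_, using the same push_handle_mutex_ that StopPush() and the data callbacks below take. A minimal sketch under that assumption:
// Hypothetical accessors (not shown in the original article): guard push_handle_
// with push_handle_mutex_ so the start/stop path and the pull-stream data
// callbacks never race on the same publisher handle.
private IntPtr GetPushHandle(int sel)
{
    lock (videoctrl[sel].push_handle_mutex_)
    {
        return videoctrl[sel].push_handle_;
    }
}

private void SetPushHandle(int sel, IntPtr handle)
{
    lock (videoctrl[sel].push_handle_mutex_)
    {
        videoctrl[sel].push_handle_ = handle;
    }
}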
Audio and video data callbacks
private void OnVideoDataHandle(IntPtr handle, IntPtr user_data,
    UInt32 video_codec_id, IntPtr data, UInt32 size,
    IntPtr info, IntPtr reserve)
{
    // Map the player handle back to the corresponding channel index
    int cur_sel = -1;
    for (int i = 0; i < videoctrl.Length; i++)
    {
        if (handle == videoctrl[i].player_handle_)
        {
            cur_sel = i;
            break;
        }
    }

    if (cur_sel < 0)
        return;

    if (!videoctrl[cur_sel].is_pushing_)
        return;

    if (data == IntPtr.Zero)
        return;

    if (size < 1)
        return;

    if (info == IntPtr.Zero)
        return;

    NT_SP_PullStreamVideoDataInfo video_info = (NT_SP_PullStreamVideoDataInfo)Marshal.PtrToStructure(info, typeof(NT_SP_PullStreamVideoDataInfo));

    lock (videoctrl[cur_sel].push_handle_mutex_)
    {
        if (!videoctrl[cur_sel].is_pushing_)
            return;

        if (videoctrl[cur_sel].push_handle_ == IntPtr.Zero)
            return;

        // New interface: forward the encoded video frame to the publisher as-is
        NTSmartPublisherSDK.NT_PB_PostVideoEncodedDataV2(videoctrl[cur_sel].push_handle_, video_codec_id,
            data, size, video_info.is_key_frame_, video_info.timestamp_, video_info.presentation_timestamp_);
    }
}
private void OnAudioDataHandle(IntPtr handle, IntPtr user_data,
    UInt32 audio_codec_id, IntPtr data, UInt32 size,
    IntPtr info, IntPtr reserve)
{
    int cur_sel = -1;
    for (int i = 0; i < videoctrl.Length; i++)
    {
        if (handle == videoctrl[i].player_handle_)
        {
            cur_sel = i;
            break;
        }
    }

    if (cur_sel < 0)
        return;

    if (!videoctrl[cur_sel].is_pushing_)
        return;

    if (data == IntPtr.Zero)
        return;

    if (size < 1)
        return;

    if (info == IntPtr.Zero)
        return;

    NT_SP_PullStreamAuidoDataInfo audio_info = (NT_SP_PullStreamAuidoDataInfo)Marshal.PtrToStructure(info, typeof(NT_SP_PullStreamAuidoDataInfo));

    lock (videoctrl[cur_sel].push_handle_mutex_)
    {
        if (!videoctrl[cur_sel].is_pushing_)
            return;

        if (videoctrl[cur_sel].push_handle_ == IntPtr.Zero)
            return;

        NTSmartPublisherSDK.NT_PB_PostAudioEncodedData(videoctrl[cur_sel].push_handle_, audio_codec_id, data, size,
            audio_info.is_key_frame_, audio_info.timestamp_,
            audio_info.parameter_info_, audio_info.parameter_info_size_);
    }
}
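Putting the pieces together, the call order for playing an RTSP stream and relaying it out as RTMP looks roughly like this. This is only a usage sketch built from the wrapper methods above; the channel index and the RTMP URL are placeholders:
// Hypothetical usage sketch: play channel 0 and relay it as RTMP on demand.
int sel = 0;

StartPlayer(sel);                                  // decode and render the RTSP stream in Unity
StartPull(sel);                                    // start receiving the encoded audio/video data
StartPush(sel, "rtmp://your.server/live/stream");  // placeholder URL: relay the data out as RTMP

// ...later, tear everything down in reverse order:
StopPush(sel);
StopPull(sel);
StopPlayer(sel);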
Summary
In practice, RTSP-to-RTMP relay in a Unity environment is easier to integrate than RTMP/RTSP playback or pushing, because it involves almost no interaction with the scene or UI. Interested developers are encouraged to give it a try.