技術背景
我們在做Android平臺RTSP轉發模組的時候,有公司提出來這樣的技術需求,他們希望拉取外部RTSP攝像頭的流,然後提供個輕量級RTSP服務,讓內網其他終端過來拉流。實際上,這塊,大牛直播SDK前幾年就已經實現。
技術實現
拉流的話,很好理解,其實就是播放端,把未解碼的資料,直接回撥上來,如果需要預覽,直接底層繪製即可。單純的資料回撥,對效能消耗不大。
回撥上來的資料,可以作為輕量級RTSP服務的資料來源(投遞編碼後資料),推送端,只要啟動RTSP服務,然後釋出RTSP流即可。
先說拉流,開始拉流、停止拉流實現:
/* * SmartPlayer.java * Author: daniusdk.com */ private boolean StartPull() { if ( isPulling ) return false; if(!isPlaying) { if (!OpenPullHandle()) return false; } libPlayer.SmartPlayerSetAudioDataCallback(player_handle_, new PlayerAudioDataCallback(stream_publisher_)); libPlayer.SmartPlayerSetVideoDataCallback(player_handle_, new PlayerVideoDataCallback(stream_publisher_)); int is_pull_trans_code = 1; libPlayer.SmartPlayerSetPullStreamAudioTranscodeAAC(player_handle_, is_pull_trans_code); int startRet = libPlayer.SmartPlayerStartPullStream(player_handle_); if (startRet != 0) { Log.e(TAG, "Failed to start pull stream!"); if(!isPlaying) { releasePlayerHandle(); } return false; } isPulling = true; return true; } private void StopPull() { if ( !isPulling ) return; isPulling = false; if (null == libPlayer || 0 == player_handle_) return; libPlayer.SmartPlayerStopPullStream(player_handle_); if ( !isPlaying) { releasePlayerHandle(); } }
音訊回撥處理:
class PlayerAudioDataCallback implements NTAudioDataCallback { private WeakReference<LibPublisherWrapper> publisher_; private int audio_buffer_size = 0; private int param_info_size = 0; private ByteBuffer audio_buffer_ = null; private ByteBuffer parameter_info_ = null; public PlayerAudioDataCallback(LibPublisherWrapper publisher) { if (publisher != null) publisher_ = new WeakReference<>(publisher); } @Override public ByteBuffer getAudioByteBuffer(int size) { //Log.i("getAudioByteBuffer", "size: " + size); if( size < 1 ) { return null; } if ( size <= audio_buffer_size && audio_buffer_ != null ) { return audio_buffer_; } audio_buffer_size = size + 512; audio_buffer_size = (audio_buffer_size+0xf) & (~0xf); audio_buffer_ = ByteBuffer.allocateDirect(audio_buffer_size); // Log.i("getAudioByteBuffer", "size: " + size + " buffer_size:" + audio_buffer_size); return audio_buffer_; } @Override public ByteBuffer getAudioParameterInfo(int size) { //Log.i("getAudioParameterInfo", "size: " + size); if(size < 1) { return null; } if ( size <= param_info_size && parameter_info_ != null ) { return parameter_info_; } param_info_size = size + 32; param_info_size = (param_info_size+0xf) & (~0xf); parameter_info_ = ByteBuffer.allocateDirect(param_info_size); //Log.i("getAudioParameterInfo", "size: " + size + " buffer_size:" + param_info_size); return parameter_info_; } public void onAudioDataCallback(int ret, int audio_codec_id, int sample_size, int is_key_frame, long timestamp, int sample_rate, int channel, int parameter_info_size, long reserve) { //Log.i("onAudioDataCallback", "ret: " + ret + ", audio_codec_id: " + audio_codec_id + ", sample_size: " + sample_size + ", timestamp: " + timestamp + // ",sample_rate:" + sample_rate); if ( audio_buffer_ == null) return; LibPublisherWrapper publisher = publisher_.get(); if (null == publisher) return; if (!publisher.is_publishing()) return; audio_buffer_.rewind(); publisher.PostAudioEncodedData(audio_codec_id, audio_buffer_, sample_size, 
is_key_frame, timestamp, parameter_info_, parameter_info_size); } }
影片回撥資料:
class PlayerVideoDataCallback implements NTVideoDataCallback { private WeakReference<LibPublisherWrapper> publisher_; private int video_buffer_size = 0; private ByteBuffer video_buffer_ = null; public PlayerVideoDataCallback(LibPublisherWrapper publisher) { if (publisher != null) publisher_ = new WeakReference<>(publisher); } @Override public ByteBuffer getVideoByteBuffer(int size) { //Log.i("getVideoByteBuffer", "size: " + size); if( size < 1 ) { return null; } if ( size <= video_buffer_size && video_buffer_ != null ) { return video_buffer_; } video_buffer_size = size + 1024; video_buffer_size = (video_buffer_size+0xf) & (~0xf); video_buffer_ = ByteBuffer.allocateDirect(video_buffer_size); // Log.i("getVideoByteBuffer", "size: " + size + " buffer_size:" + video_buffer_size); return video_buffer_; } public void onVideoDataCallback(int ret, int video_codec_id, int sample_size, int is_key_frame, long timestamp, int width, int height, long presentation_timestamp) { //Log.i("onVideoDataCallback", "ret: " + ret + ", video_codec_id: " + video_codec_id + ", sample_size: " + sample_size + ", is_key_frame: "+ is_key_frame + ", timestamp: " + timestamp + // ",presentation_timestamp:" + presentation_timestamp); if ( video_buffer_ == null) return; LibPublisherWrapper publisher = publisher_.get(); if (null == publisher) return; if (!publisher.is_publishing()) return; video_buffer_.rewind(); publisher.PostVideoEncodedData(video_codec_id, video_buffer_, sample_size, is_key_frame, timestamp, presentation_timestamp); } }
啟動RTSP服務:
//啟動/停止RTSP服務 class ButtonRtspServiceListener implements View.OnClickListener { public void onClick(View v) { if (isRTSPServiceRunning) { stopRtspService(); btnRtspService.setText("啟動RTSP服務"); btnRtspPublisher.setEnabled(false); isRTSPServiceRunning = false; return; } Log.i(TAG, "onClick start rtsp service.."); rtsp_handle_ = libPublisher.OpenRtspServer(0); if (rtsp_handle_ == 0) { Log.e(TAG, "建立rtsp server例項失敗! 請檢查SDK有效性"); } else { int port = 28554; if (libPublisher.SetRtspServerPort(rtsp_handle_, port) != 0) { libPublisher.CloseRtspServer(rtsp_handle_); rtsp_handle_ = 0; Log.e(TAG, "建立rtsp server埠失敗! 請檢查埠是否重複或者埠不在範圍內!"); } if (libPublisher.StartRtspServer(rtsp_handle_, 0) == 0) { Log.i(TAG, "啟動rtsp server 成功!"); } else { libPublisher.CloseRtspServer(rtsp_handle_); rtsp_handle_ = 0; Log.e(TAG, "啟動rtsp server失敗! 請檢查設定的埠是否被佔用!"); } btnRtspService.setText("停止RTSP服務"); btnRtspPublisher.setEnabled(true); isRTSPServiceRunning = true; } } }
釋出RTSP流:
//釋出/停止RTSP流 class ButtonRtspPublisherListener implements View.OnClickListener { public void onClick(View v) { if (stream_publisher_.is_rtsp_publishing()) { stopRtspPublisher(); btnRtspPublisher.setText("釋出RTSP流"); btnGetRtspSessionNumbers.setEnabled(false); btnRtspService.setEnabled(true); return; } Log.i(TAG, "onClick start rtsp publisher.."); InitAndSetConfig(); String rtsp_stream_name = "stream1"; stream_publisher_.SetRtspStreamName(rtsp_stream_name); stream_publisher_.ClearRtspStreamServer(); stream_publisher_.AddRtspStreamServer(rtsp_handle_); if (!stream_publisher_.StartRtspStream()) { stream_publisher_.try_release(); Log.e(TAG, "呼叫釋出rtsp流介面失敗!"); return; } btnRtspPublisher.setText("停止RTSP流"); btnGetRtspSessionNumbers.setEnabled(true); btnRtspService.setEnabled(false); } }
獲取RTSP Session會話數:
//獲取RTSP會話數 class ButtonGetRtspSessionNumbersListener implements View.OnClickListener { public void onClick(View v) { if (libPublisher != null && rtsp_handle_ != 0) { int session_numbers = libPublisher.GetRtspServerClientSessionNumbers(rtsp_handle_); Log.i(TAG, "GetRtspSessionNumbers: " + session_numbers); PopRtspSessionNumberDialog(session_numbers); } } }
總結
因為RTSP外部拉流,不需要解碼,配合大牛直播SDK的SmartPlayer播放器,延遲和直連的,差別不大,整體毫秒級,延遲非常低,巡檢或監控類場景,都可以達到相應的技術指標。如果需要二次水印,也可以回撥解碼後的yuv或rgb資料,推送端新增二次文字或圖片水印後,編碼輸出,這種在一些合成類場景,比如智慧煤礦、管廊隧道等行業,非常適用,感興趣的開發者,可以單獨跟我探討。