ffmpeg推流时与服务器断开后的自动重连功能的实现
生活随笔
收集整理的這篇文章主要介紹了
ffmpeg推流时与服务器断开后的自动重连功能的实现
小編覺得挺不錯的,現在分享給大家,幫大家做個參考.
當我們使用ffmpeg進行視頻推流的時候,流媒體服務器與推流終端一直連接的時候,推流是成功的,但是如果服務器重啟,就會出現推流一直失敗的問題,av_interleaved_write_frame返回值-32,根據ffmpeg對返回值的解釋:
-32(EPIPE):管道斷開:這個一般是socket錯誤,推流的服務器斷開了socket連接,導致發送失敗。
推流程序如果沒有斷開重連功能的話,就只能關掉程序,重新啟動來重新連接服務器解決問題,但這顯然不是解決問題的辦法,我們期望的辦法是程序能夠在推流失敗后能夠自動重連服務器,其實現邏輯如下:
1.啟動Init成功,開始推流
2.推流失敗,調用stop,清理調用的FFmpeg的環(huán)境。
3.重新啟動Init,成功后開始推流
ffmpeg關于rtmp推流的代碼,網上有很多,我把這些代碼修改了下,封裝成一個類,名字叫PushRtmp, 其有三個函數:
1.Init,初始化連接服務器
2.Push,推流
3.Stop, 停止推流,清理環(huán)境
二話不說,上代碼。
頭文件:
#pragma once #include <opencv2/opencv.hpp> #include <string> #ifdef _WIN32 // Windows extern "C" { #include "libavformat/avformat.h" #include "libavutil/mathematics.h" #include "libavutil/time.h" }; #else // Linux... #ifdef __cplusplus extern "C" { #endif #include <libavformat/avformat.h> #include <libavutil/mathematics.h> #include <libavutil/time.h> #include <libswscale/swscale.h> #ifdef __cplusplus }; #endif #endifclass PushRtmp { public:static PushRtmp* getInst();bool Init( std::string url, int width, int height, int fps );void Push( cv::Mat& image );void Stop();private:static PushRtmp* instance;// rtmp服務地址std::string rtmp_url_;int width_;int height_;int fps_;// 輸出的數據結構AVFrame* yuv_ = NULL;// 像素格式轉換上下文SwsContext* vsc_ = NULL;int vpts = 0;// 編碼器上下文AVCodecContext* vc_ = NULL;// rtmp flv 封裝器AVFormatContext* ic_ = NULL;AVPacket pack_;AVStream* vs_ = NULL; };cpp文件:
#include "push_rtmp.hpp"
#include <chrono>
#include <exception>
#include <iostream>
#include <thread>

PushRtmp* PushRtmp::instance = nullptr;

PushRtmp* PushRtmp::getInst()
{
    if ( instance == nullptr )
    {
        instance = new PushRtmp();
    }
    return instance;
}

// Initialize the connection to the RTMP server.
// url    -- RTMP publishing address
// width  -- frame width in pixels
// height -- frame height in pixels
// fps    -- target frame rate
// Returns true on success.  On any failure the partially created FFmpeg
// state is released via Stop() so Init() can safely be retried (the
// original leaked the scaler/frame/encoder on early returns).
bool PushRtmp::Init( std::string url, int width, int height, int fps )
{
    rtmp_url_ = url;
    width_ = width;
    height_ = height;
    fps_ = fps;

    // Register codecs, muxers and network protocols.
    // (Deprecated in FFmpeg 4.x but harmless no-ops when called repeatedly.)
    avcodec_register_all();
    av_register_all();
    avformat_network_init();

    try
    {
        int inWidth = width;
        int inHeight = height;
        std::cout << "+++++++++" << inWidth << inHeight << std::endl;

        // 2. Pixel-format conversion context: BGR24 (OpenCV) -> YUV420P (x264).
        vsc_ = sws_getCachedContext( vsc_,
            inWidth, inHeight, AV_PIX_FMT_BGR24,   // source w, h, pixel format
            inWidth, inHeight, AV_PIX_FMT_YUV420P, // target w, h, pixel format
            SWS_BICUBIC,                           // scaling algorithm
            0, 0, 0 );
        if ( !vsc_ )
        {
            printf( "sws_getCachedContext failed!" );
            return false;
        }

        // 3. Output frame that receives the converted YUV data.
        yuv_ = av_frame_alloc();
        yuv_->format = AV_PIX_FMT_YUV420P;
        yuv_->width = inWidth;
        yuv_->height = inHeight;
        yuv_->pts = 0;
        int ret = av_frame_get_buffer( yuv_, 32 );
        if ( ret != 0 )
        {
            char buf[ 1024 ] = { 0 };
            av_strerror( ret, buf, sizeof( buf ) - 1 );
            printf( "%s", buf ); // fix: never pass a non-literal as the format string
            Stop();
            return false;
        }

        // 4. H.264 encoder.
        AVCodec* codec = avcodec_find_encoder( AV_CODEC_ID_H264 );
        if ( !codec )
        {
            printf( "Can`t find h264 encoder!" );
            Stop();
            return false;
        }
        vc_ = avcodec_alloc_context3( codec );
        if ( !vc_ )
        {
            printf( "avcodec_alloc_context3 failed!" );
            Stop();
            return false;
        }
        // Put SPS/PPS in extradata instead of each keyframe (FLV requires it).
        vc_->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
        vc_->codec_id = codec->id;
        vc_->thread_count = 8;
        vc_->bit_rate = 50 * 1024 * 8; // ~50 kB of compressed video per second
        vc_->width = inWidth;
        vc_->height = inHeight;
        vc_->time_base = { 1, fps };
        vc_->framerate = { fps, 1 };
        vc_->gop_size = 50;    // one keyframe every 50 frames
        vc_->max_b_frames = 0; // no B-frames: lower latency for live streaming
        vc_->pix_fmt = AV_PIX_FMT_YUV420P;
        ret = avcodec_open2( vc_, 0, 0 );
        if ( ret != 0 )
        {
            char buf[ 1024 ] = { 0 };
            av_strerror( ret, buf, sizeof( buf ) - 1 );
            printf( "%s", buf );
            Stop();
            return false;
        }
        std::cout << "avcodec_open2 success!" << std::endl;

        // 5. Output muxer and video stream.
        // a. FLV muxer context targeting the RTMP URL.
        ret = avformat_alloc_output_context2( &ic_, 0, "flv", url.c_str() );
        if ( ret != 0 )
        {
            char buf[ 1024 ] = { 0 };
            av_strerror( ret, buf, sizeof( buf ) - 1 );
            printf( "%s", buf );
            Stop();
            return false;
        }
        // b. Add the video stream and copy the encoder parameters into it.
        vs_ = avformat_new_stream( ic_, NULL );
        if ( !vs_ )
        {
            printf( "avformat_new_stream failed" );
            Stop();
            return false;
        }
        vs_->codecpar->codec_tag = 0;
        avcodec_parameters_from_context( vs_->codecpar, vc_ );
        av_dump_format( ic_, 0, url.c_str(), 1 );

        // Open the network output and write the FLV header.
        ret = avio_open( &ic_->pb, url.c_str(), AVIO_FLAG_WRITE );
        if ( ret != 0 )
        {
            char buf[ 1024 ] = { 0 };
            av_strerror( ret, buf, sizeof( buf ) - 1 );
            printf( "%s", buf );
            Stop();
            return false;
        }
        ret = avformat_write_header( ic_, NULL );
        if ( ret != 0 )
        {
            char buf[ 1024 ] = { 0 };
            av_strerror( ret, buf, sizeof( buf ) - 1 );
            printf( "%s", buf );
            Stop();
            return false;
        }

        // Fix: pack_ was used uninitialized by the first avcodec_receive_packet
        // (which calls av_packet_unref on it).  Also restart the timestamp so a
        // reconnect does not resume with a huge pts jump.
        av_init_packet( &pack_ );
        pack_.data = NULL;
        pack_.size = 0;
        vpts = 0;
    }
    catch ( std::exception& ex )
    {
        std::cerr << ex.what() << std::endl;
        Stop(); // centralised cleanup; handles partially built state
        return false;
    }
    return true;
}

// Convert, encode and send one BGR frame.
// On a broken pipe (-32: the server closed the socket) the connection is
// torn down and Init() is retried every 5 s until it succeeds.
void PushRtmp::Push( cv::Mat& image )
{
    // Wrap the OpenCV BGR buffer for sws_scale.
    uint8_t* indata[ AV_NUM_DATA_POINTERS ] = { 0 };
    indata[ 0 ] = image.data;
    int insize[ AV_NUM_DATA_POINTERS ] = { 0 };
    insize[ 0 ] = image.cols * image.elemSize(); // bytes per row
    int h = sws_scale( vsc_, indata, insize, 0, image.rows,
                       yuv_->data, yuv_->linesize );
    if ( h <= 0 )
    {
        return;
    }

    // H.264 encode.
    yuv_->pts = vpts;
    vpts++;
    int ret = avcodec_send_frame( vc_, yuv_ );
    if ( ret != 0 )
        return;
    ret = avcodec_receive_packet( vc_, &pack_ );
    // Fix: the original tested "ret != 0 || pack_.size > 0", which let error
    // returns (e.g. EAGAIN during encoder delay) fall through and write a
    // stale packet.  Bail out unless we really received a packet.
    if ( ret != 0 || pack_.size <= 0 )
        return;

    // Rescale timestamps from the encoder time base to the stream time base.
    pack_.pts = av_rescale_q( pack_.pts, vc_->time_base, vs_->time_base );
    pack_.dts = av_rescale_q( pack_.dts, vc_->time_base, vs_->time_base );
    pack_.duration = av_rescale_q( pack_.duration, vc_->time_base, vs_->time_base );
    // av_interleaved_write_frame takes ownership of pack_ and unrefs it.
    ret = av_interleaved_write_frame( ic_, &pack_ );
    if ( ret != 0 )
    {
        std::cout << "push rtmp failed error code:" << ret;
        if ( ret == -32 ) // EPIPE: server dropped the socket
        {
            std::cout << "Server disconnected, start reconnect...";
            Stop();
            // Fix: the original called Init() once unconditionally and then
            // again inside the loop, initializing twice (and leaking) on a
            // successful first attempt.  Retry in a single loop instead.
            while ( !Init( rtmp_url_, width_, height_, fps_ ) )
            {
                std::this_thread::sleep_for( std::chrono::milliseconds( 5000 ) );
            }
        }
    }
}

// Stop streaming and release every FFmpeg object so Init() can run again.
// Safe to call on partially initialized state (every member is null-checked).
void PushRtmp::Stop()
{
    if ( vsc_ )
    {
        sws_freeContext( vsc_ );
        vsc_ = NULL;
    }
    if ( yuv_ )
    {
        av_frame_free( &yuv_ ); // fix: the original never freed yuv_ (leak per reconnect)
    }
    if ( vc_ )
    {
        avcodec_free_context( &vc_ );
    }
    if ( ic_ )
    {
        // Fix: the original closed ic_->pb only when vc_ was set and
        // dereferenced ic_ without a null check.
        if ( ic_->pb )
        {
            avio_closep( &ic_->pb );
        }
        avformat_free_context( ic_ );
        ic_ = NULL;
    }
    vs_ = NULL; // owned by ic_; freed together with it
    vpts = 0;
}
使用示范代碼:
#include "push_rtmp.hpp"
#include <chrono>
#include <iostream>
#include <opencv2/highgui.hpp>
#include <stdexcept>
#include <thread>

using namespace cv;

// Demo: capture frames from the default camera and push them to an RTMP server.
int main( int argc, char* argv[] )
{
    VideoCapture cam;
    Mat frame;
    cam.open( 0 );
    if ( !cam.isOpened() )
    {
        // Fix: std::exception has no (const char*) constructor in standard
        // C++ (that is an MSVC extension); use std::runtime_error.
        throw std::runtime_error( "cam open failed!" );
    }
    namedWindow( "video" );
    int inWidth = cam.get( CAP_PROP_FRAME_WIDTH );
    int inHeight = cam.get( CAP_PROP_FRAME_HEIGHT );
    int fps = cam.get( CAP_PROP_FPS );
    fps = 25; // many webcams report 0 fps; force a sane fixed rate
    // Keep retrying until the server accepts the connection.
    while ( true )
    {
        if ( PushRtmp::getInst()->Init( "rtmp://192.168.123.32/live/24", inWidth, inHeight, fps ) )
            break;
        std::this_thread::sleep_for( std::chrono::milliseconds( 1000 ) );
    }
    for ( ;; )
    {
        // Grab the next frame; skip this iteration on a dropped frame.
        if ( !cam.grab() )
        {
            continue;
        }
        // Decode the grabbed frame into a BGR Mat.
        if ( !cam.retrieve( frame ) )
        {
            continue;
        }
        PushRtmp::getInst()->Push( frame );
    }
    return 0;
}
總結
以上是生活随笔為你收集整理的ffmpeg推流时与服务器断开后的自动重连功能的实现的全部內容,希望文章能夠幫你解決所遇到的問題。
- 上一篇: OpenCV文字绘制支持中文显示
- 下一篇: OpenCV中像素逻辑运算:逻辑与运算