
RTSP streaming with OpenCV

Goal:

Push frames processed by OpenCV directly to an RTSP stream.

Testing the stream

Both commands below assume an RTSP server (for example, MediaMTX) is already listening at rtsp://127.0.0.1:8554; rtspclientsink publishes to an existing server rather than acting as one.

Publishing a local file

.\gst-launch-1.0.exe filesrc location=D:\\5.mp4 ! decodebin ! videoconvert ! x264enc ! rtspclientsink location=rtsp://127.0.0.1:8554/live

Playing back the stream

.\gst-launch-1.0.exe rtspsrc location=rtsp://127.0.0.1:8554/live ! rtph264depay ! h264parse ! avdec_h264 ! autovideosink
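
The stream can also be verified directly from OpenCV. A minimal sketch, assuming OpenCV was built with FFmpeg support; the URL matches the publish command above:

#include <opencv2/opencv.hpp>
#include <iostream>

int main()
{
    // Open the RTSP stream published above (FFmpeg backend assumed)
    cv::VideoCapture cap("rtsp://127.0.0.1:8554/live", cv::CAP_FFMPEG);
    if (!cap.isOpened()) {
        std::cerr << "Failed to open RTSP stream." << std::endl;
        return -1;
    }
    cv::Mat frame;
    while (cap.read(frame)) {
        cv::imshow("rtsp check", frame);
        if (cv::waitKey(1) == 'q')
            break;
    }
    return 0;
}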

Streaming implementation approaches

Approach 1: Streaming with GStreamer

//Push OpenCV frames over RTSP via a GStreamer appsrc pipeline
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <opencv2/opencv.hpp>
#include <chrono>
#include <cstring>
#include <ctime>
#include <iomanip>
#include <iostream>

#define RTSP_SERVER_URL "rtsp://127.0.0.1:8554/live"
#define CAPTURE_WIDTH 640
#define CAPTURE_HEIGHT 480
#define CAPTURE_FPS 30
#define FRAME_INTERVAL_MS (1000 / CAPTURE_FPS)

int main(int argc, char* argv[])
{
    // Initialize GStreamer
    gst_init(&argc, &argv);

    // Build the pipeline: appsrc -> videoconvert -> x264enc -> rtph264pay -> rtspclientsink
    GstElement* pipeline = gst_pipeline_new("rtsp-pipeline");
    GstElement* appsrc = gst_element_factory_make("appsrc", "app-source");
    GstElement* videoconvert = gst_element_factory_make("videoconvert", "convert");
    GstElement* x264enc = gst_element_factory_make("x264enc", "encoder");
    GstElement* rtph264pay = gst_element_factory_make("rtph264pay", "payloader");
    GstElement* rtspclientsink = gst_element_factory_make("rtspclientsink", "sink");
    if (!pipeline || !appsrc || !videoconvert || !x264enc || !rtph264pay || !rtspclientsink) {
        std::cerr << "Error: Failed to create one or more GStreamer elements." << std::endl;
        return -1;
    }

    // Configure appsrc: live source, time-based format, automatic timestamps,
    // and caps matching the BGR frames OpenCV produces
    g_object_set(G_OBJECT(appsrc), "is-live", TRUE, NULL);
    g_object_set(G_OBJECT(appsrc), "format", GST_FORMAT_TIME, NULL);
    g_object_set(G_OBJECT(appsrc), "do-timestamp", TRUE, NULL);
    g_object_set(G_OBJECT(appsrc), "caps", gst_caps_new_simple("video/x-raw",
        "format", G_TYPE_STRING, "BGR",
        "width", G_TYPE_INT, CAPTURE_WIDTH,
        "height", G_TYPE_INT, CAPTURE_HEIGHT,
        "framerate", GST_TYPE_FRACTION, CAPTURE_FPS, 1,
        NULL), NULL);

    // Add all elements to the pipeline and link them in order
    gst_bin_add_many(GST_BIN(pipeline), appsrc, videoconvert, x264enc, rtph264pay, rtspclientsink, NULL);
    if (!gst_element_link_many(appsrc, videoconvert, x264enc, rtph264pay, rtspclientsink, NULL)) {
        std::cerr << "Error: Failed to link pipeline elements." << std::endl;
        return -1;
    }

    // Point rtspclientsink at the RTSP server
    g_object_set(G_OBJECT(rtspclientsink), "location", RTSP_SERVER_URL, NULL);

    // Start the pipeline
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    // Open the default camera
    cv::VideoCapture cap(0);
    if (!cap.isOpened()) {
        std::cerr << "Error: Unable to open camera." << std::endl;
        return -1;
    }

    // Capture loop: grab a frame, wrap it in a GstBuffer, push it into appsrc
    while (true)
    {
        cv::Mat frame;
        cap >> frame;
        if (frame.empty()) {
            std::cerr << "Warning: Empty frame." << std::endl;
            continue;
        }

        // Print the current time with millisecond precision
        auto now = std::chrono::system_clock::now();
        auto ms = std::chrono::duration_cast<std::chrono::milliseconds>(
            now.time_since_epoch()).count() % 1000;
        auto in_time_t = std::chrono::system_clock::to_time_t(now);
        std::cout << std::put_time(std::localtime(&in_time_t), "%Y-%m-%d %X")
                  << "." << std::setfill('0') << std::setw(3) << ms << std::endl;

        // Copy the frame data into a newly allocated GstBuffer
        GstBuffer* buffer = gst_buffer_new_allocate(NULL, frame.total() * frame.elemSize(), NULL);
        GstMapInfo map;
        gst_buffer_map(buffer, &map, GST_MAP_WRITE);
        memcpy(map.data, frame.data, frame.total() * frame.elemSize());
        gst_buffer_unmap(buffer, &map);

        // Hand the buffer to appsrc
        GstFlowReturn ret;
        g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
        gst_buffer_unref(buffer);
        if (ret != GST_FLOW_OK) {
            std::cerr << "Warning: push-buffer returned " << ret << std::endl;
        }

        // Pace the loop to roughly the capture frame rate
        g_usleep(FRAME_INTERVAL_MS * 1000);
    }

    // Tear down the pipeline
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);

    return 0;
}
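
With do-timestamp enabled, appsrc stamps each buffer when it arrives. An alternative sketch (not in the original code) is to leave do-timestamp off and assign an explicit PTS and duration from a frame counter before pushing, which gives a perfectly regular timeline even if capture jitters:

// Hypothetical: explicit timestamps derived from a frame counter
// (assumes "do-timestamp" is left FALSE on appsrc)
static guint64 frame_index = 0;
GST_BUFFER_PTS(buffer) = gst_util_uint64_scale(frame_index, GST_SECOND, CAPTURE_FPS);
GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale(1, GST_SECOND, CAPTURE_FPS);
++frame_index;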

Approach 2: Streaming with FFmpeg

#include "rtspencoder.h"
#include <QCoreApplication>
#include <QDebug>

RTSPEncoder::RTSPEncoder(const char* rtspUrl, int outputWidth, int outputHeight, QObject* parent)
    : QThread(parent), m_rtspUrl(rtspUrl), m_outputWidth(outputWidth), m_outputHeight(outputHeight)
{

}

RTSPEncoder::~RTSPEncoder()
{
    cleanup();
}

int RTSPEncoder::initialize()
{
    avformat_network_init();

    // Initialize output context
    m_outFormatCtx = nullptr;
    avformat_alloc_output_context2(&m_outFormatCtx, nullptr, "rtsp", m_rtspUrl.c_str());
    if (!m_outFormatCtx) 
    {
        std::cerr << "Error: Failed to allocate output context\n";
        return -1;
    }

    // Find encoder for H264
    m_codec = (AVCodec*)avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!m_codec) {
        std::cerr << "Error: Failed to find H264 encoder\n";
        return -1;
    }

    // Add video stream to output context
    m_outStream = avformat_new_stream(m_outFormatCtx, m_codec);
    if (!m_outStream) {
        std::cerr << "Error: Failed to create new stream\n";
        return -1;
    }

    // Initialize codec context for the encoder
    m_codecCtx = avcodec_alloc_context3(m_codec);
    if (!m_codecCtx) {
        std::cerr << "Error: Failed to allocate codec context\n";
        return -1;
    }
    m_codecCtx->codec_id = AV_CODEC_ID_H264;
    m_codecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
    m_codecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
    m_codecCtx->width = m_outputWidth;
    m_codecCtx->height = m_outputHeight;
    m_codecCtx->time_base = { 1, 25 }; // assuming 25fps
    m_codecCtx->bit_rate = 700000;
    m_codecCtx->gop_size = 10;
    m_codecCtx->max_b_frames = 0;
    m_codecCtx->flags = AV_CODEC_FLAG_GLOBAL_HEADER | AV_CODEC_FLAG_LOW_DELAY;

    // Set codec options
    AVDictionary* codecOptions = nullptr;
    av_dict_set(&codecOptions, "preset", "ultrafast", 0);
    av_dict_set(&codecOptions, "tune", "zerolatency", 0);
    

    // Open the encoder; codecOptions are applied to m_codecCtx
    if (avcodec_open2(m_codecCtx, m_codec, &codecOptions) < 0) {
        std::cerr << "Error: Failed to open encoder\n";
        return -1;
    }

    // Copy codec parameters to output stream
    avcodec_parameters_from_context(m_outStream->codecpar, m_codecCtx);

    // Write stream header (this also opens the RTSP connection)
    if (avformat_write_header(m_outFormatCtx, nullptr) < 0) {
        std::cerr << "Error: Failed to write stream header\n";
        return -1;
    }

    m_frame.create(m_outputHeight, m_outputWidth, CV_8UC3);
    m_frame.setTo(0);
    m_frameCount = 0;

    m_avFrame = av_frame_alloc();

    return 0;
}

void RTSPEncoder::encodeFrameAndSend()
{
    while (1)
    {
        // Block here until AddFrame() signals that a new frame is ready
        {
            std::unique_lock<std::mutex> lock(mtx);
            while (!m_isReady) {
                m_cv.wait(lock);
            }
            m_isReady = false;

            // Convert the OpenCV Mat to I420 and wire the planes into the AVFrame.
            // OpenCV frames are BGR by default, so convert BGR -> I420.
            cv::cvtColor(m_frame, m_im420, cv::COLOR_BGR2YUV_I420);

            m_avFrame->data[0] = m_im420.data;
            m_avFrame->data[1] = m_avFrame->data[0] + m_codecCtx->width * m_codecCtx->height;
            m_avFrame->data[2] = m_avFrame->data[1] + m_codecCtx->width * m_codecCtx->height / 4;
            m_avFrame->linesize[0] = m_codecCtx->width;
            m_avFrame->linesize[1] = m_codecCtx->width / 2;
            m_avFrame->linesize[2] = m_codecCtx->width / 2;
            m_avFrame->width = m_codecCtx->width;
            m_avFrame->height = m_codecCtx->height;
            m_avFrame->format = AV_PIX_FMT_YUV420P;
            m_avFrame->pts = m_frameCount++;

            auto ret = avcodec_send_frame(m_codecCtx, m_avFrame);
            if (ret < 0)
            {
                std::cerr << "Error: Failed to send frame for encoding\n";
                return;
            }

            // Drain every packet produced for this frame and write it to the stream
            AVPacket pkt;
            av_init_packet(&pkt);
            while (ret >= 0)
            {
                ret = avcodec_receive_packet(m_codecCtx, &pkt);
                if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
                {
                    break;
                }
                else if (ret < 0)
                {
                    std::cerr << "Error: Failed to receive encoded packet\n";
                    continue;
                }
                pkt.stream_index = m_outStream->index;
                // Rescale timestamps from the codec time base to the stream time base
                av_packet_rescale_ts(&pkt, m_codecCtx->time_base, m_outStream->time_base);
                ret = av_interleaved_write_frame(m_outFormatCtx, &pkt);
                av_packet_unref(&pkt);

                if (ret < 0)
                {
                    std::cerr << "Error: Failed to write packet to output stream\n";
                    continue;
                }
            }
        }
    }
}


void RTSPEncoder::AddFrame(cv::Mat& imGBR, int nFrame)
{
    if (imGBR.cols != m_outputWidth || imGBR.rows != m_outputHeight)
        return;

    // Copy the frame in under the lock, then wake encodeFrameAndSend()
    {
        std::lock_guard<std::mutex> lock(mtx);
        memcpy(m_frame.data, imGBR.data, imGBR.cols * imGBR.rows * 3);
        m_frameCount = nFrame;
        m_isReady = true;
    }
    m_cv.notify_one();
}



void RTSPEncoder::cleanup()
{
    // Flush the muxer and write the trailer before freeing anything
    if (m_outFormatCtx)
        av_write_trailer(m_outFormatCtx);

    if (m_codecCtx)
        avcodec_free_context(&m_codecCtx);
    if (m_avFrame)
        av_frame_free(&m_avFrame);
    if (m_outFormatCtx)
        avformat_free_context(m_outFormatCtx);
}
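
A minimal driver sketch for this class. It assumes QThread::run() is overridden to call encodeFrameAndSend() (the override is not shown above); the camera index, URL, and frame size are placeholders:

// Hypothetical usage of RTSPEncoder
#include "rtspencoder.h"
#include <opencv2/opencv.hpp>

int runEncoder()
{
    RTSPEncoder encoder("rtsp://127.0.0.1:8554/live", 1280, 720);
    if (encoder.initialize() < 0)
        return -1;
    encoder.start();  // assumed: run() calls encodeFrameAndSend()

    cv::VideoCapture cap(0);
    cv::Mat frame, sized;
    int n = 0;
    while (cap.read(frame)) {
        cv::resize(frame, sized, cv::Size(1280, 720));  // match the encoder's output size
        encoder.AddFrame(sized, n++);                    // copies under the internal lock
    }
    return 0;
}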

Approach 3: Streaming via OpenCV's GStreamer backend

This approach is the simplest to implement, but it showed noticeable latency in testing, most likely from the encoder's default buffering.

// Push thread: waits for the processing thread to publish a frame, then writes
// it through cv::VideoWriter. Relies on members declared elsewhere: m_writer,
// push_str (destination string), fps, width, height, img_push, proc2push_mtx,
// proc2push_cv.
void push()
{
    int codec = cv::VideoWriter::fourcc('H', 'E', 'V', 'C');
    m_writer.open(push_str, codec, fps, cv::Size(width, height));

    cv::Mat tmp;
    while (true)
    {
        {
            // Wait for the processing thread to signal a new frame
            std::unique_lock<std::mutex> lk(proc2push_mtx);
            proc2push_cv.wait(lk);
            tmp = img_push.clone();
        }

        // Reopen the writer if the connection dropped
        if (!m_writer.isOpened())
        {
            int codec = cv::VideoWriter::fourcc('H', 'E', 'V', 'C');
            m_writer.open(push_str, codec, fps, cv::Size(width, height));
        }

        m_writer.write(tmp);
    }
}
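
The snippet above leaves push_str undefined. If the write really goes through OpenCV's GStreamer backend, push_str would normally be a full pipeline string opened with cv::CAP_GSTREAMER and a fourcc of 0, since the encoder is named inside the pipeline. A sketch, with the pipeline and URL as assumptions:

// Hypothetical push_str for the GStreamer backend
std::string push_str =
    "appsrc ! videoconvert ! x264enc tune=zerolatency speed-preset=ultrafast "
    "! rtspclientsink location=rtsp://127.0.0.1:8554/live";
m_writer.open(push_str, cv::CAP_GSTREAMER, 0, fps, cv::Size(width, height), true);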

Other test code

int main1()  
{  
    //std::cout << cv::getBuildInformation() << std::endl;  // check GStreamer support in the OpenCV build
    //USB camera pipelines:
    //gst-launch-1.0 v4l2src device=/dev/video0 ! image/jpeg,width=1920,height=1080,framerate=30/1 ! jpegdec ! videoconvert ! xvimagesink
    //gst-launch-1.0 v4l2src device=/dev/video0 ! image/jpeg,format=MJPG,width=1280,height=720,framerate=30/1 ! nvv4l2decoder mjpeg=1 ! nvvidconv ! autovideosink
    //cv::VideoCapture capture(0);  // not OK: blurry image
    //"v4l2src device=/dev/video0 ! image/jpeg,width=1920,height=1080,framerate=30/1 ! jpegdec ! videoconvert ! appsink"
    std::string pipeline = "v4l2src device=/dev/video0 ! image/jpeg,width=1920,height=1080,framerate=30/1 ! nvv4l2decoder mjpeg=1 ! nvvidconv ! appsink";
    cv::VideoCapture capture(pipeline, cv::CAP_GSTREAMER);  // OK

    //RTSP camera pipelines:
    //gst-launch-1.0 rtspsrc location='rtsp://admin:[email protected]/cam/realmonitor?channel=1&subtype=0' latency=100 ! rtph264depay ! h264parse ! omxh264dec ! nvvidconv ! video/x-raw, width=1920, height=1080, format=BGRx ! videoconvert ! xvimagesink
    //gst-launch-1.0 rtspsrc location='rtsp://admin:[email protected]/cam/realmonitor?channel=1&subtype=0' latency=100 ! videoconvert ! xvimagesink

    //cv::VideoCapture capture("rtsp://admin:[email protected]/cam/realmonitor?channel=1&subtype=0",cv::CAP_FFMPEG);  // FFmpeg backend, not GStreamer: ~2 s delay
    //std::string pipeline = "rtspsrc location='rtsp://admin:[email protected]/cam/realmonitor?channel=1&subtype=0' latency=100 ! rtph264depay ! h264parse ! omxh264dec ! nvvidconv ! video/x-raw, width=1920, height=1080, format=BGRx !videoconvert ! appsink";

    //std::string pipeline = "rtsp://admin:[email protected]/cam/realmonitor?channel=1&subtype=0";
    //cv::VideoCapture capture(pipeline, cv::CAP_GSTREAMER);
    //std::string pipeline = "rtspsrc location=rtsp://admin:[email protected]/cam/realmonitor?channel=1&subtype=0 ! decodebin ! videoconvert ! appsink max-buffers=1 drop=true";
    // cv::VideoCapture capture;
    // if(capture.open("rtsp://admin:[email protected]/cam/realmonitor?channel=1&subtype=0"))  //, cv::CAP_GSTREAMER
    // {
    //     std::cout << "open video Failed !" << std::endl;  
    //     return -1;  
    // }
    
    if (!capture.isOpened()) 
    {  
        std::cout << "Read video Failed !" << std::endl;  
        return -1;  
    }  

    // cv::VideoWriter m_writer;
    // int codec = cv::VideoWriter::fourcc('H', '2', '6', '4');
    // double fps = 25.0;
    // int width = 1920;
    // int height = 1080;  //nvv4l2h264enc
    // m_writer.open("appsrc ! videoconvert ! x264enc ! flvmux ! rtmpsink location=rtmp://192.168.18.218:1935/live", codec, fps, cv::Size(width, height));  //client
    // if (!m_writer.isOpened())
    // {
    //     std::cout << "Error: VideoWriter is not opened!" << std::endl;
    //     return -1;
    // }
  
    cv::Mat frame;  
    cv::namedWindow("video test");  
  
    int frame_num = (int)capture.get(cv::CAP_PROP_FRAME_COUNT);  // 0 or -1 for live sources
    std::cout << "total frame number is: " << frame_num << std::endl;  
  
    //for (int i = 0; i < frame_num - 1; ++i)  
    while(1)
    {  
        capture >> frame;  
        if (frame.empty())
            break;
        //m_writer.write(frame);
        //capture.read(frame);  // alternative read API
        imshow("video test", frame);  
        if (cv::waitKey(30) == 'q')  
        {  
            break;  
        }  
    }  
  
    cv::destroyWindow("video test");  
    capture.release();  
    return 0;  
}  
gst-launch-1.0 -v filesrc location=D:/1.mp4 ! decodebin ! x264enc ! rtph264pay ! udpsink host=127.0.0.1 port=8554

gst-launch-1.0 -v rtspsrc location=rtsp://127.0.0.1:10054/live/Fs4FsmKSR ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! video/x-raw, width=3840, height=2100, format=BGRx ! videoconvert ! ximagesink sync=false

gst-launch-1.0 -v videotestsrc ! video/x-raw, format=BGRx ! autovideosink

gst-launch-1.0 -v rtspsrc location=rtsp://127.0.0.1:10054/live/Fs4FsmKSR ! decodebin ! videoconvert ! autovideosink

gst-launch-1.0 -v appsrc ! videoconvert ! video/x-raw,format=I420 ! nvvidconv ! video/x-raw(memory:NVMM),format=I420 ! nvv4l2h264enc insert-sps-pps=1 ! video/x-h264,stream-format=byte-stream ! rtspclientsink location=rtsp://localhost:8554/mystream

gst-launch-1.0 -v appsrc name=mysource is-live=true block=true format=GST_FORMAT_TIME \
    ! videoconvert \
    ! video/x-raw,format=NV12 \
    ! nvv4l2h264enc \
    ! video/x-h264,stream-format=byte-stream \
    ! h264parse \
    ! rtspclientsink location=rtsp://localhost:8554/mystream