环境需要安装opencv和gstreamer

Ubuntu20.04安装gstreamer

sudo apt install libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev
sudo apt install gstreamer1.0-plugins-good gstreamer1.0-libav   # rtph264depay 在 plugins-good,avdec_h264 在 gst-libav
sudo apt-get install libgtk-3-dev
pkg-config --modversion gtk+-3.0

Ubuntu20.04安装opencv

sudo apt install libopencv-dev

主视频流接收并转换为opencv的cv mat

#include <cstring>
#include <string>

#include <gst/gst.h>
#include <gst/video/video.h>

#include <opencv2/opencv.hpp>

/* appsink "new-sample" callback: pull one decoded I420 frame, convert it to
 * BGR with OpenCV, and save it as frame_<N>.png.
 *
 * Returns GST_FLOW_OK in all cases so the pipeline keeps running even when a
 * sample cannot be processed. */
static GstFlowReturn on_new_sample_from_sink(GstElement *sink, gpointer data) {
    GstSample *sample = nullptr;

    g_signal_emit_by_name(sink, "pull-sample", &sample);
    if (!sample)
        return GST_FLOW_OK;  // nothing to process

    GstBuffer *buffer = gst_sample_get_buffer(sample);
    GstCaps *caps = gst_sample_get_caps(sample);
    GstVideoInfo info;

    if (buffer && caps && gst_video_info_from_caps(&info, caps)) {
        GstVideoFrame vframe;
        /* gst_video_frame_map resolves per-plane offsets and strides for us,
         * so the copy below stays correct even when the decoder pads rows
         * (raw pointer arithmetic with a single stride does not). */
        if (gst_video_frame_map(&vframe, &info, buffer, GST_MAP_READ)) {
            const gint width = GST_VIDEO_FRAME_WIDTH(&vframe);
            const gint height = GST_VIDEO_FRAME_HEIGHT(&vframe);

            /* Pack the Y, U and V planes into one contiguous buffer laid out
             * exactly as cv::COLOR_YUV2BGR_I420 expects.
             * NOTE(review): assumes the negotiated format is I420 — enforce
             * "video/x-raw, format=I420" on the appsink caps. */
            cv::Mat yuv(height * 3 / 2, width, CV_8UC1);
            guchar *dst = yuv.data;
            for (guint plane = 0; plane < GST_VIDEO_FRAME_N_PLANES(&vframe); plane++) {
                const guchar *src =
                    (const guchar *)GST_VIDEO_FRAME_PLANE_DATA(&vframe, plane);
                const gint src_stride = GST_VIDEO_FRAME_PLANE_STRIDE(&vframe, plane);
                const gint row_bytes = GST_VIDEO_FRAME_COMP_WIDTH(&vframe, plane);
                const gint rows = GST_VIDEO_FRAME_COMP_HEIGHT(&vframe, plane);
                for (gint row = 0; row < rows; row++) {
                    memcpy(dst, src + (gsize)row * src_stride, row_bytes);
                    dst += row_bytes;
                }
            }

            // Convert YUV420 planar (I420) to BGR.
            cv::Mat bgrFrame;
            cv::cvtColor(yuv, bgrFrame, cv::COLOR_YUV2BGR_I420);

            // Save each frame under a unique, incrementing file name.
            static int frame_count = 0;
            std::string filename = "frame_" + std::to_string(frame_count) + ".png";
            cv::imwrite(filename, bgrFrame);
            frame_count++;

            gst_video_frame_unmap(&vframe);
        }
    }

    gst_sample_unref(sample);
    return GST_FLOW_OK;
}

/* Bus watch: stop the main loop on end-of-stream, or print the error and
 * stop the loop on an error message. All other messages are ignored. */
static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
    GMainLoop *loop = (GMainLoop *)data;
    GstMessageType type = GST_MESSAGE_TYPE(msg);

    if (type == GST_MESSAGE_EOS) {
        g_print("End of stream\n");
        g_main_loop_quit(loop);
    } else if (type == GST_MESSAGE_ERROR) {
        GError *err = nullptr;
        gchar *dbg = nullptr;

        gst_message_parse_error(msg, &err, &dbg);
        g_printerr("Error: %s\n", err->message);
        g_error_free(err);
        g_free(dbg);

        g_main_loop_quit(loop);
    }

    // Keep the watch installed.
    return TRUE;
}

int main(int argc, char *argv[])
{
    gst_init(&argc, &argv);

    GstElement *pipeline = gst_parse_launch("udpsrc uri=udp://192.168.17.51:5600 ! application/x-rtp, media=video, clock-rate=90000, encoding-name=H264 ! rtph264depay ! avdec_h264 ! videoconvert ! videoscale ! appsink name=sink emit-signals=true", NULL);
    GstElement *sink = gst_bin_get_by_name(GST_BIN(pipeline), "sink");

    g_signal_connect(sink, "new-sample", G_CALLBACK(on_new_sample_from_sink), NULL);

    GstBus *bus = gst_element_get_bus(pipeline);
    GMainLoop *loop = g_main_loop_new(NULL, FALSE);

    gst_bus_add_watch(bus, bus_call, loop);

    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_main_loop_run(loop);

    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(bus);
    gst_object_unref(pipeline);
    g_main_loop_unref(loop);

    return 0;
}

运行命令:g++ -o video_stream video_stream.cpp $(pkg-config --cflags --libs gstreamer-1.0 gstreamer-video-1.0 opencv4)
(注意 pkg-config 需要放在命令替换 $( ) 或反引号中,否则其输出不会传给 g++)

下视视频流获取并转换为opencv的cv mat

#include <gst/gst.h>
#include <gst/video/video.h>
#include <opencv2/opencv.hpp>

/* appsink "new-sample" callback: pull one decoded frame, treat plane 0 as an
 * 8-bit grayscale image, convert it to BGR, and save it as frame_<N>.png.
 *
 * Returns GST_FLOW_OK in all cases so the pipeline keeps running even when a
 * sample cannot be processed. */
static GstFlowReturn on_new_sample_from_sink(GstElement *sink, gpointer data) {
    GstSample *sample = nullptr;

    g_signal_emit_by_name(sink, "pull-sample", &sample);
    if (!sample)
        return GST_FLOW_OK;  // nothing to process

    GstBuffer *buffer = gst_sample_get_buffer(sample);
    GstCaps *caps = gst_sample_get_caps(sample);
    GstVideoInfo info;
    GstMapInfo map;

    /* Only touch the data when the caps parse and the map succeeds; the
     * original code dereferenced map.data without checking either. */
    if (buffer && caps && gst_video_info_from_caps(&info, caps) &&
        gst_buffer_map(buffer, &map, GST_MAP_READ)) {
        gint width = info.width;
        gint height = info.height;
        gint stride = GST_VIDEO_INFO_PLANE_STRIDE(&info, 0);

        /* Wrap the mapped data without copying; the explicit stride keeps
         * this correct even when the decoder pads each row.
         * NOTE(review): treats the first plane as 8-bit luma — valid for
         * GRAY8 (and for the Y plane of I420); enforce GRAY8 on the appsink
         * caps to make this assumption explicit. */
        cv::Mat grayFrame(height, width, CV_8UC1, map.data, stride);

        // Expand single-channel gray to 3-channel BGR.
        cv::Mat bgrFrame;
        cv::cvtColor(grayFrame, bgrFrame, cv::COLOR_GRAY2BGR);

        // Save each frame under a unique, incrementing file name.
        static int frame_count = 0;
        std::string filename = "frame_" + std::to_string(frame_count) + ".png";
        cv::imwrite(filename, bgrFrame);
        frame_count++;

        gst_buffer_unmap(buffer, &map);
    }

    gst_sample_unref(sample);
    return GST_FLOW_OK;
}

/* Bus watch callback. Quits the main loop when the stream ends or when an
 * error is posted on the bus; error details are printed first. */
static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
    GMainLoop *main_loop = (GMainLoop *)data;

    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_EOS:
        g_print("End of stream\n");
        g_main_loop_quit(main_loop);
        break;

    case GST_MESSAGE_ERROR: {
        GError *gerror = nullptr;
        gchar *debug_info = nullptr;

        gst_message_parse_error(msg, &gerror, &debug_info);
        g_printerr("Error: %s\n", gerror->message);
        g_error_free(gerror);
        g_free(debug_info);
        g_main_loop_quit(main_loop);
        break;
    }

    default:
        // Other message types are not interesting here.
        break;
    }

    return TRUE;  // keep watching the bus
}

int main(int argc, char *argv[])
{
    gst_init(&argc, &argv);

    GstElement *pipeline = gst_parse_launch("udpsrc uri=udp://192.168.17.51:5700 ! application/x-rtp, media=video, clock-rate=90000, encoding-name=H264 ! rtph264depay ! avdec_h264 ! videoconvert ! videoscale ! appsink name=sink emit-signals=true", NULL);
    GstElement *sink = gst_bin_get_by_name(GST_BIN(pipeline), "sink");

    g_signal_connect(sink, "new-sample", G_CALLBACK(on_new_sample_from_sink), NULL);

    GstBus *bus = gst_element_get_bus(pipeline);
    GMainLoop *loop = g_main_loop_new(NULL, FALSE);

    gst_bus_add_watch(bus, bus_call, loop);

    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_main_loop_run(loop);

    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(bus);
    gst_object_unref(pipeline);
    g_main_loop_unref(loop);

    return 0;
}

运行命令:g++ -o down_stream down_stream.cpp $(pkg-config --cflags --libs gstreamer-1.0 gstreamer-video-1.0 opencv4)
(注意 pkg-config 需要放在命令替换 $( ) 或反引号中,否则其输出不会传给 g++)

作者:bai  创建时间:2024-09-14 16:20
最后编辑:bai  更新时间:2024-09-27 11:15