rtp2rtmp_video.c
========
1. 推流到rtmp服务器
$ ffmpeg -re -i ./fulankelin-hd.mp4 \
    -an -vcodec h264    -f rtp rtp://127.0.0.1:5004 \
    -vn -acodec libopus -f rtp rtp://127.0.0.1:5003
2. SDP信息
SDP:

v=0
o=- 0 0 IN IP4 127.0.0.1
s=No Name
t=0 0
a=tool:libavformat 58.29.100
m=video 5004 RTP/AVP 96
c=IN IP4 127.0.0.1
a=rtpmap:96 H264/90000
a=fmtp:96 packetization-mode=1
m=audio 5003 RTP/AVP 97
c=IN IP4 127.0.0.1
b=AS:96
a=rtpmap:97 opus/48000/2
a=fmtp:97 sprop-stereo=1
3. gst-launch测试
gst-launch-1.0 -em \
  rtpbin name=rtpbin latency=5 \
  udpsrc port=5003 caps="application/x-rtp,media=(string)audio,clock-rate=(int)48000,encoding-name=(string)OPUS" ! rtpbin.recv_rtp_sink_0 \
  rtpbin. ! rtpopusdepay ! opusdec ! audioconvert ! audioresample ! avenc_aac ! mux. \
  udpsrc port=5004 caps="application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264" ! rtpbin.recv_rtp_sink_1 \
  rtpbin. ! rtph264depay ! h264parse ! mux. \
  flvmux name=mux streamable=true ! rtmpsink sync=false location=rtmp://u1802/live/demo
4. 程序代码
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 | #include <string.h> #include <math.h> #include <gst/gst.h> #define VIDEO_CAPS "application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264" #define AUDIO_CAPS "application/x-rtp,media=(string)audio,clock-rate=(int)48000,encoding-name=(string)OPUS" /* will be called when rtpbin has validated a payload that we can depayload */ static void pad_added_cb(GstElement *rtpbin, GstPad *new_pad, GstElement *depay) { char *pad_name = GST_PAD_NAME(new_pad); char *depay_name = gst_element_get_name(depay); if (strstr(pad_name, "recv_rtp_src_0_") && strstr(depay_name, "audiodepay")) { GstPad *sinkpad; GstPadLinkReturn lres; g_print("new payload on rtpbin: %s %s %s\n", gst_element_get_name(rtpbin), GST_PAD_NAME(new_pad), gst_element_get_name(depay)); sinkpad = gst_element_get_static_pad(depay, "sink"); g_assert(sinkpad); lres = gst_pad_link(new_pad, sinkpad); g_assert(lres == GST_PAD_LINK_OK); gst_object_unref(sinkpad); } else if (strstr(pad_name, "recv_rtp_src_1_") && strstr(depay_name, "videodepay")) { GstPad *sinkpad; GstPadLinkReturn lres; g_print("new payload on rtpbin: %s %s %s\n", gst_element_get_name(rtpbin), GST_PAD_NAME(new_pad), gst_element_get_name(depay)); sinkpad = gst_element_get_static_pad(depay, "sink"); g_assert(sinkpad); lres = gst_pad_link(new_pad, sinkpad); g_assert(lres == GST_PAD_LINK_OK); gst_object_unref(sinkpad); } 
} int main(int argc, char *argv[]) { GMainLoop *loop; GstElement *pipeline; GstElement *rtpbin; GstElement *audiosrc, *audiodepay, *audiodec, *audiores, *audioconv, *audiosink; GstElement *videosrc, *videodepay, *videosink; GstElement *flvmux, *rtmpsink; gboolean res; GstCaps *caps; GstPadLinkReturn lres; GstPad *srcpad, *audio_sinkpad, *video_sinkpad; gst_init(&argc, &argv); pipeline = gst_pipeline_new(NULL); g_assert(pipeline); /* the rtpbin element */ rtpbin = gst_element_factory_make("rtpbin", "rtpbin"); g_assert(rtpbin); gst_bin_add(GST_BIN(pipeline), rtpbin); // 001 源 audiosrc = gst_element_factory_make("udpsrc", "audiosrc"); g_assert(audiosrc); g_object_set(audiosrc, "port", 5003, NULL); caps = gst_caps_from_string(AUDIO_CAPS); g_object_set(audiosrc, "caps", caps, NULL); gst_caps_unref(caps); gst_bin_add(GST_BIN(pipeline), audiosrc); videosrc = gst_element_factory_make("udpsrc", "videosrc"); g_assert(videosrc); g_object_set(videosrc, "port", 5004, NULL); caps = gst_caps_from_string(VIDEO_CAPS); g_object_set(videosrc, "caps", caps, NULL); gst_caps_unref(caps); gst_bin_add(GST_BIN(pipeline), videosrc); /* now link all to the rtpbin, start by getting an RTP sinkpad for session 0 */ srcpad = gst_element_get_static_pad(audiosrc, "src"); audio_sinkpad = gst_element_get_request_pad(rtpbin, "recv_rtp_sink_0"); lres = gst_pad_link(srcpad, audio_sinkpad); g_assert(lres == GST_PAD_LINK_OK); gst_object_unref(srcpad); srcpad = gst_element_get_static_pad(videosrc, "src"); video_sinkpad = gst_element_get_request_pad(rtpbin, "recv_rtp_sink_1"); lres = gst_pad_link(srcpad, video_sinkpad); g_assert(lres == GST_PAD_LINK_OK); gst_object_unref(srcpad); /* the depayloading and decoding */ audiodepay = gst_element_factory_make("rtpopusdepay", "audiodepay"); g_assert(audiodepay); audiodec = gst_element_factory_make("opusdec", "audiodec"); g_assert(audiodepay); /* the audio playback and format conversion */ audioconv = gst_element_factory_make("audioconvert", "audioconv"); 
g_assert(audioconv); audiores = gst_element_factory_make("audioresample", "audiores"); g_assert(audiores); audiosink = gst_element_factory_make("avenc_aac", "audiosink"); // autoaudiosink voaacenc avenc_aac avenc_opus g_assert(audiosink); /* add depayloading and playback to the pipeline and link */ gst_bin_add_many(GST_BIN(pipeline), audiodepay, audiodec, audioconv, audiores, audiosink, NULL); res = gst_element_link_many(audiodepay, audiodec, audioconv, audiores, audiosink, NULL); g_assert(res == TRUE); videodepay = gst_element_factory_make("rtph264depay", "videodepay"); g_assert(videodepay); videosink = gst_element_factory_make("h264parse", "videosink"); g_assert(videosink); gst_bin_add_many(GST_BIN(pipeline), videodepay, videosink, NULL); res = gst_element_link_many(videodepay, videosink, NULL); g_assert(res == TRUE); // flvmux flvmux = gst_element_factory_make("flvmux", "flvmux"); g_assert(flvmux); g_object_set(flvmux, "streamable", TRUE, NULL); gst_bin_add(GST_BIN(pipeline), flvmux); res = gst_element_link(audiosink, flvmux); g_assert(res == TRUE); res = gst_element_link(videosink, flvmux); g_assert(res == TRUE); rtmpsink = gst_element_factory_make("rtmpsink", "rtmpsink"); g_assert(rtmpsink); g_object_set(rtmpsink, "sync", FALSE, NULL); g_object_set(rtmpsink, "location", "rtmp://u1802/live/demo2", NULL); gst_bin_add(GST_BIN(pipeline), rtmpsink); res = gst_element_link(flvmux, rtmpsink); g_assert(res == TRUE); /* the RTP pad that we have to connect to the depayloader will be created * dynamically so we connect to the pad-added signal, pass the depayloader as * user_data so that we can link to it. 
*/ g_signal_connect(rtpbin, "pad-added", G_CALLBACK(pad_added_cb), audiodepay); g_signal_connect(rtpbin, "pad-added", G_CALLBACK(pad_added_cb), videodepay); /* set the pipeline to playing */ g_print("starting receiver pipeline\n"); gst_element_set_state(pipeline, GST_STATE_PLAYING); /* we need to run a GLib main loop to get the messages */ loop = g_main_loop_new(NULL, FALSE); g_main_loop_run(loop); g_print("stopping receiver pipeline\n"); gst_element_set_state(pipeline, GST_STATE_NULL); gst_object_unref(loop); gst_object_unref(pipeline); gst_object_unref(audio_sinkpad); gst_object_unref(video_sinkpad); gst_object_unref(rtmpsink); gst_object_unref(flvmux); gst_object_unref(rtpbin); gst_object_unref(audiosrc); gst_object_unref(audiodepay); gst_object_unref(audiodec); gst_object_unref(audiores); gst_object_unref(audioconv); gst_object_unref(audiosink); gst_object_unref(videosrc); gst_object_unref(videodepay); gst_object_unref(videosink); return 0; } // |