Commits (1)
......@@ -7,9 +7,16 @@ Please read the blog post for details.
The code below needs GStreamer from the git `main` branch at the time of
writing, or version 1.22 or newer.
## Baseline Sender / Receiver Code
## Sender Clock Time Retrieval
This branch contains the baseline sender/receiver code.
This branch contains the code that retrieves the sender clock times on the
receiver, renders them over the video frames, and also prints them on
stdout.
Both timestamps on the video frames should be the same: the one rendered by
the sender at the top and the one rendered by the receiver at the bottom.
![Screenshot](screenshot.png)
Run the sender with
......
......@@ -28,15 +28,46 @@ fn main() -> Result<(), Error> {
pipeline.set_property("uri", &uri);
// When the RTSP source is created, configure a latency of 40ms instead
// of the default of 2s on it.
// of the default of 2s on it and also add reference timestamp metadata
// with the sender clock times to each packet if possible.
pipeline.connect_closure(
"source-setup",
false,
glib::closure!(|_playbin: &gst::Pipeline, source: &gst::Element| {
source.set_property("latency", 40u32);
source.set_property("add-reference-timestamp-meta", true);
}),
);
// Create a timeoverlay element as custom video filter to render the
// timestamps from the reference timestamp metadata on top of the video
// frames in the bottom left.
//
// Also add a pad probe on its sink pad to print the same timestamp to
// stdout on each frame.
let timeoverlay =
gst::ElementFactory::make("timeoverlay", None).context("Creating timeoverlay")?;
timeoverlay.set_property_from_str("time-mode", "reference-timestamp");
timeoverlay.set_property_from_str("valignment", "bottom");
let sinkpad = timeoverlay
.static_pad("video_sink")
.expect("Failed to get timeoverlay sinkpad");
sinkpad
.add_probe(gst::PadProbeType::BUFFER, |_pad, info| {
if let Some(gst::PadProbeData::Buffer(ref buffer)) = info.data {
if let Some(meta) = buffer.meta::<gst::ReferenceTimestampMeta>() {
println!("Have sender clock time {}", meta.timestamp());
} else {
println!("Have no sender clock time");
}
}
gst::PadProbeReturn::Ok
})
.expect("Failed to add pad probe");
pipeline.set_property("video-filter", &timeoverlay);
// Start the pipeline.
pipeline
.set_state(gst::State::Playing)
......
......@@ -222,7 +222,26 @@ mod media {
impl ObjectImpl for Media {}
// NOTE: the empty `impl RTSPMediaImpl for Media {}` that previously preceded
// this block conflicted with the impl below (rustc E0119: conflicting trait
// implementations for the same type). There must be exactly one impl.
impl RTSPMediaImpl for Media {
    /// Configures the `rtpbin` element created for this media.
    ///
    /// Chains up to the parent implementation first, then adjusts how the
    /// RTCP sender reports map RTP time to NTP time so that receivers can
    /// recover the sender's pipeline clock time.
    ///
    /// Returns `Err(gst::LoggableError)` if the parent setup fails.
    fn setup_rtpbin(
        &self,
        media: &Self::Type,
        rtpbin: &gst::Element,
    ) -> Result<(), gst::LoggableError> {
        self.parent_setup_rtpbin(media, rtpbin)?;

        // Use local pipeline clock time as RTP NTP time source instead of using
        // the local wallclock time converted to the NTP epoch.
        rtpbin.set_property_from_str("ntp-time-source", "clock-time");

        // Use the capture time instead of the send time for the RTP / NTP timestamp
        // mapping. The difference between the two options is the capture/encoder/etc.
        // latency that is introduced before sending.
        rtpbin.set_property("rtcp-sync-send-time", false);

        Ok(())
    }
}
}
glib::wrapper! {
......