Commit 21302016 authored by Bilal Elmoussaoui

manual changes post ObjectExt improvements

parent 0d009bca
@@ -81,7 +81,7 @@ fn example_main() -> Result<(), Error> {
 gst::ElementFactory::make("decodebin", None).map_err(|_| MissingElement("decodebin"))?;
 // Tell the filesrc what file to load
-src.set_property("location", uri)?;
+src.set_property("location", uri);
 pipeline.add_many(&[&src, &decodebin])?;
 gst::Element::link_many(&[&src, &decodebin])?;
......
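Note on the API change driving these edits: with the ObjectExt improvements, glib::ObjectExt::set_property no longer returns a Result. It panics if the property does not exist or the value has the wrong type, so the `?`/`.expect(...)` plumbing disappears. A minimal before/after sketch (property name taken from the hunk above):

    // Before: returned Result<(), glib::BoolError>, propagated with `?`.
    src.set_property("location", uri)?;
    // After: infallible signature; a missing property or a type mismatch
    // is treated as a programmer error and panics at runtime.
    src.set_property("location", uri);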
@@ -76,9 +76,7 @@ fn configure_encodebin(encodebin: &gst::Element) {
 .build();
 // Finally, apply the EncodingProfile onto our encodebin element.
-encodebin
-.set_property("profile", &container_profile)
-.expect("set profile property failed");
+encodebin.set_property("profile", &container_profile);
 }
 fn example_main() -> Result<(), Error> {
@@ -104,10 +102,8 @@ fn example_main() -> Result<(), Error> {
 let sink =
 gst::ElementFactory::make("filesink", None).map_err(|_| MissingElement("filesink"))?;
-src.set_property("uri", uri)
-.expect("setting URI Property failed");
-sink.set_property("location", output_file)
-.expect("setting location property failed");
+src.set_property("uri", uri);
+sink.set_property("location", output_file);
 // Configure the encodebin.
 // Here we tell the bin what format we expect it to create at its output.
......
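The same pattern repeats across the examples. A self-contained sketch of the new style (element names come from the hunks; the file names are placeholders and gst::init() is assumed to have been called):

    use gst::prelude::*;

    fn build_pipeline() -> gst::Pipeline {
        let pipeline = gst::Pipeline::new(None);
        let src = gst::ElementFactory::make("filesrc", None).unwrap();
        let sink = gst::ElementFactory::make("filesink", None).unwrap();
        // set_property now panics on an unknown property or wrong value
        // type, which is why the `.expect(...)` calls in the old code go away.
        src.set_property("location", "input.bin");
        sink.set_property("location", "output.bin");
        pipeline.add_many(&[&src, &sink]).unwrap();
        gst::Element::link_many(&[&src, &sink]).unwrap();
        pipeline
    }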
@@ -33,11 +33,11 @@ fn create_ui(app: &gtk::Application) {
 // video frames to our texture (if they are not already in the GPU). Now we tell the OpenGL-sink
 // about our gtkglsink element, from where it will retrieve the OpenGL texture to fill.
 let glsinkbin = gst::ElementFactory::make("glsinkbin", None).unwrap();
-glsinkbin.set_property("sink", &gtkglsink).unwrap();
+glsinkbin.set_property("sink", &gtkglsink);
 // The gtkglsink creates the gtk widget for us. This is accessible through a property.
 // So we get it and use it later to add it to our gui.
-let widget = gtkglsink.property("widget").unwrap();
-(glsinkbin, widget.get::<gtk::Widget>().unwrap())
+let widget = gtkglsink.property::<gtk::Widget>("widget");
+(glsinkbin, widget)
 } else {
 // Unfortunately, using the OpenGL widget didn't work out, so we will have to render
 // our frames manually, using the CPU. An example why this may fail is, when
@@ -45,8 +45,8 @@ fn create_ui(app: &gtk::Application) {
 let sink = gst::ElementFactory::make("gtksink", None).unwrap();
 // The gtksink creates the gtk widget for us. This is accessible through a property.
 // So we get it and use it later to add it to our gui.
-let widget = sink.property("widget").unwrap();
-(sink, widget.get::<gtk::Widget>().unwrap())
+let widget = sink.property::<gtk::Widget>("widget");
+(sink, widget)
 };
 pipeline.add_many(&[&src, &sink]).unwrap();
......
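The property getter changed in the same way: instead of fetching an untyped glib::Value and unwrapping twice, the expected type is passed as a generic parameter. A sketch, using the "widget" property from the hunk above:

    // Before: two fallible steps, one per unwrap.
    let widget = sink.property("widget").unwrap();
    let widget = widget.get::<gtk::Widget>().unwrap();
    // After: one typed call; panics if the property is missing or is not
    // a gtk::Widget.
    let widget = sink.property::<gtk::Widget>("widget");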
@@ -85,7 +85,7 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
 .field("height", 800i32)
 .field("framerate", gst::Fraction::new(15, 1))
 .build();
-capsfilter.set_property("caps", &caps).unwrap();
+capsfilter.set_property("caps", &caps);
 // The videotestsrc supports multiple test patterns. In this example, we will use the
 // pattern with a white ball moving around the video's center point.
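Caps are still built the same way; only the final set_property call loses its Result. A sketch of the full statement this hunk belongs to (the "video/x-raw" media type is an assumption, since the builder's first line falls outside the hunk):

    let caps = gst::Caps::builder("video/x-raw")
        .field("width", 800i32)
        .field("height", 800i32)
        .field("framerate", gst::Fraction::new(15, 1))
        .build();
    // Panics if the element has no "caps" property:
    capsfilter.set_property("caps", &caps);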
@@ -131,141 +131,145 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
 //
 // In this case, the signal passes the gst::Element and a gst::Sample with
 // the current buffer
-overlay
-.connect("draw", false, move |args| {
-use std::f64::consts::PI;
-let drawer = &drawer_clone;
-let drawer = drawer.lock().unwrap();
-// Get the signal's arguments
-let _overlay = args[0].get::<gst::Element>().unwrap();
-let sample = args[1].get::<gst::Sample>().unwrap();
-let buffer = sample.buffer().unwrap();
-let timestamp = buffer.pts().unwrap();
-let info = drawer.info.as_ref().unwrap();
-let layout = drawer.layout.borrow();
-let angle = 2.0
-* PI
-* (timestamp % (10 * gst::ClockTime::SECOND)).nseconds() as f64
-/ (10.0 * gst::ClockTime::SECOND.nseconds() as f64);
-/* Create a gst::Buffer for Cairo to draw into */
-let frame_width = info.width() as usize;
-let frame_height = info.height() as usize;
-let stride = 4 * frame_width;
-let frame_size = stride * frame_height;
-/* Create an RGBA buffer, and add a video meta that the videooverlaycomposition expects */
-let mut buffer = gst::Buffer::with_size(frame_size).unwrap();
-gst_video::VideoMeta::add(
-buffer.get_mut().unwrap(),
-gst_video::VideoFrameFlags::empty(),
-gst_video::VideoFormat::Bgra,
-frame_width as u32,
-frame_height as u32,
-).unwrap();
-let buffer = buffer.into_mapped_buffer_writable().unwrap();
-let buffer = {
-let buffer_ptr = unsafe { buffer.buffer().as_ptr() };
-let surface = cairo::ImageSurface::create_for_data(
-buffer,
-cairo::Format::ARgb32,
-frame_width as i32,
-frame_height as i32,
-stride as i32,
-)
-.unwrap();
-let cr = cairo::Context::new(&surface).expect("Failed to create cairo context");
-cr.save().expect("Failed to save state");
-cr.set_operator(cairo::Operator::Clear);
-cr.paint().expect("Failed to clear background");
-cr.restore().expect("Failed to restore state");
-// The image we draw (the text) will be static, but we will change the
-// transformation on the drawing context, which rotates and shifts everything
-// that we draw afterwards. Like this, we have no complicated calculations
-// in the actual drawing below.
-// Calling multiple transformation methods after each other will apply the
-// new transformation on top. If you repeat the cr.rotate(angle) line below
-// this a second time, everything in the canvas will rotate twice as fast.
-cr.translate(
-f64::from(info.width()) / 2.0,
-f64::from(info.height()) / 2.0,
-);
-cr.rotate(angle);
-// This loop will render 10 times the string "GStreamer" in a circle
-for i in 0..10 {
-// Cairo, like most rendering frameworks, is using a stack for transformations
-// with this, we push our current transformation onto this stack - allowing us
-// to make temporary changes / render something / and then returning to the
-// previous transformations.
-cr.save().expect("Failed to save state");
-let angle = (360. * f64::from(i)) / 10.0;
-let red = (1.0 + f64::cos((angle - 60.0) * PI / 180.0)) / 2.0;
-cr.set_source_rgb(red, 0.0, 1.0 - red);
-cr.rotate(angle * PI / 180.0);
-// Update the text layout. This function is only updating pango's internal state.
-// So e.g. that after a 90 degree rotation it knows that what was previously going
-// to end up as a 200x100 rectangle would now be 100x200.
-pangocairo::functions::update_layout(&cr, &**layout);
-let (width, _height) = layout.size();
-// Using width and height of the text, we can properly position it within
-// our canvas.
-cr.move_to(
--(f64::from(width) / f64::from(pango::SCALE)) / 2.0,
--(f64::from(info.height())) / 2.0,
-);
-// After telling the layout object where to draw itself, we actually tell
-// it to draw itself into our cairo context.
-pangocairo::functions::show_layout(&cr, &**layout);
-// Here we go one step up in our stack of transformations, removing any
-// changes we did to them since the last call to cr.save();
-cr.restore().expect("Failed to restore state");
-}
+overlay.connect("draw", false, move |args| {
+use std::f64::consts::PI;
+let drawer = &drawer_clone;
+let drawer = drawer.lock().unwrap();
+// Get the signal's arguments
+let _overlay = args[0].get::<gst::Element>().unwrap();
+let sample = args[1].get::<gst::Sample>().unwrap();
+let buffer = sample.buffer().unwrap();
+let timestamp = buffer.pts().unwrap();
+let info = drawer.info.as_ref().unwrap();
+let layout = drawer.layout.borrow();
+let angle = 2.0 * PI * (timestamp % (10 * gst::ClockTime::SECOND)).nseconds() as f64
+/ (10.0 * gst::ClockTime::SECOND.nseconds() as f64);
+/* Create a gst::Buffer for Cairo to draw into */
+let frame_width = info.width() as usize;
+let frame_height = info.height() as usize;
+let stride = 4 * frame_width;
+let frame_size = stride * frame_height;
+/* Create an RGBA buffer, and add a video meta that the videooverlaycomposition expects */
+let mut buffer = gst::Buffer::with_size(frame_size).unwrap();
+gst_video::VideoMeta::add(
+buffer.get_mut().unwrap(),
+gst_video::VideoFrameFlags::empty(),
+gst_video::VideoFormat::Bgra,
+frame_width as u32,
+frame_height as u32,
+)
+.unwrap();
-// Safety: The surface still owns a mutable reference to the buffer but our reference
-// to the surface here is the last one. After dropping the surface the buffer would be
-// freed, so we keep an additional strong reference here before dropping the surface,
-// which is then returned. As such it's guaranteed that nothing is using the buffer
-// anymore mutably.
-drop(cr);
-unsafe {
-assert_eq!(
-cairo::ffi::cairo_surface_get_reference_count(surface.to_raw_none()),
-1
-);
-let buffer = glib::translate::from_glib_none(buffer_ptr);
-drop(surface);
-buffer
-}
-};
-/* Turn the buffer into a VideoOverlayRectangle, then place
-* that into a VideoOverlayComposition and return it.
-*
-* A VideoOverlayComposition can take a Vec of such rectangles
-* spaced around the video frame, but we're just outputting 1
-* here */
-let rect = gst_video::VideoOverlayRectangle::new_raw(
-&buffer,
-0, 0, frame_width as u32, frame_height as u32,
-gst_video::VideoOverlayFormatFlags::PREMULTIPLIED_ALPHA,
+let buffer = buffer.into_mapped_buffer_writable().unwrap();
+let buffer = {
+let buffer_ptr = unsafe { buffer.buffer().as_ptr() };
+let surface = cairo::ImageSurface::create_for_data(
+buffer,
+cairo::Format::ARgb32,
+frame_width as i32,
+frame_height as i32,
+stride as i32,
+)
+.unwrap();
+let cr = cairo::Context::new(&surface).expect("Failed to create cairo context");
+cr.save().expect("Failed to save state");
+cr.set_operator(cairo::Operator::Clear);
+cr.paint().expect("Failed to clear background");
+cr.restore().expect("Failed to restore state");
+// The image we draw (the text) will be static, but we will change the
+// transformation on the drawing context, which rotates and shifts everything
+// that we draw afterwards. Like this, we have no complicated calculations
+// in the actual drawing below.
+// Calling multiple transformation methods after each other will apply the
+// new transformation on top. If you repeat the cr.rotate(angle) line below
+// this a second time, everything in the canvas will rotate twice as fast.
+cr.translate(
+f64::from(info.width()) / 2.0,
+f64::from(info.height()) / 2.0,
+);
+cr.rotate(angle);
+// This loop will render 10 times the string "GStreamer" in a circle
+for i in 0..10 {
+// Cairo, like most rendering frameworks, is using a stack for transformations
+// with this, we push our current transformation onto this stack - allowing us
+// to make temporary changes / render something / and then returning to the
+// previous transformations.
+cr.save().expect("Failed to save state");
+let angle = (360. * f64::from(i)) / 10.0;
+let red = (1.0 + f64::cos((angle - 60.0) * PI / 180.0)) / 2.0;
+cr.set_source_rgb(red, 0.0, 1.0 - red);
+cr.rotate(angle * PI / 180.0);
+// Update the text layout. This function is only updating pango's internal state.
+// So e.g. that after a 90 degree rotation it knows that what was previously going
+// to end up as a 200x100 rectangle would now be 100x200.
+pangocairo::functions::update_layout(&cr, &**layout);
+let (width, _height) = layout.size();
+// Using width and height of the text, we can properly position it within
+// our canvas.
+cr.move_to(
+-(f64::from(width) / f64::from(pango::SCALE)) / 2.0,
+-(f64::from(info.height())) / 2.0,
+);
+// After telling the layout object where to draw itself, we actually tell
+// it to draw itself into our cairo context.
+pangocairo::functions::show_layout(&cr, &**layout);
-Some(gst_video::VideoOverlayComposition::new(Some(&rect)).unwrap().to_value())
-})
-.unwrap();
+// Here we go one step up in our stack of transformations, removing any
+// changes we did to them since the last call to cr.save();
+cr.restore().expect("Failed to restore state");
+}
+// Safety: The surface still owns a mutable reference to the buffer but our reference
+// to the surface here is the last one. After dropping the surface the buffer would be
+// freed, so we keep an additional strong reference here before dropping the surface,
+// which is then returned. As such it's guaranteed that nothing is using the buffer
+// anymore mutably.
+drop(cr);
+unsafe {
+assert_eq!(
+cairo::ffi::cairo_surface_get_reference_count(surface.to_raw_none()),
+1
+);
+let buffer = glib::translate::from_glib_none(buffer_ptr);
+drop(surface);
+buffer
+}
+};
+/* Turn the buffer into a VideoOverlayRectangle, then place
+* that into a VideoOverlayComposition and return it.
+*
+* A VideoOverlayComposition can take a Vec of such rectangles
+* spaced around the video frame, but we're just outputting 1
+* here */
+let rect = gst_video::VideoOverlayRectangle::new_raw(
+&buffer,
+0,
+0,
+frame_width as u32,
+frame_height as u32,
+gst_video::VideoOverlayFormatFlags::PREMULTIPLIED_ALPHA,
+);
+Some(
+gst_video::VideoOverlayComposition::new(Some(&rect))
+.unwrap()
+.to_value(),
+)
+});
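connect changed alongside the property API: it now returns a glib::SignalHandlerId directly rather than a Result, which is why the `.unwrap()` that used to trail the closure is gone. A sketch of the new handler shape (make_composition is a hypothetical helper standing in for the drawing code above):

    let handler_id = overlay.connect("draw", false, move |args| {
        // Signal arguments arrive as &[glib::Value] and are extracted with
        // typed get::<T>() calls.
        let _overlay = args[0].get::<gst::Element>().unwrap();
        let sample = args[1].get::<gst::Sample>().unwrap();
        let composition = make_composition(&sample); // hypothetical helper
        // Signals with a return value hand it back as Some(Value).
        Some(composition.to_value())
    });
    // The returned id can later be used to detach the handler:
    // overlay.disconnect(handler_id);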
 // Add a signal handler to the overlay's "caps-changed" signal. This could e.g.
 // be called when the sink that we render to does not support resizing the image
@@ -274,17 +278,15 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
 // resize our canvas's size.
 // Another possibility for when this might happen is, when our video is a network
 // stream that dynamically changes resolution when enough bandwidth is available.
-overlay
-.connect("caps-changed", false, move |args| {
-let _overlay = args[0].get::<gst::Element>().unwrap();
-let caps = args[1].get::<gst::Caps>().unwrap();
+overlay.connect("caps-changed", false, move |args| {
+let _overlay = args[0].get::<gst::Element>().unwrap();
+let caps = args[1].get::<gst::Caps>().unwrap();
-let mut drawer = drawer.lock().unwrap();
-drawer.info = Some(gst_video::VideoInfo::from_caps(&caps).unwrap());
+let mut drawer = drawer.lock().unwrap();
+drawer.info = Some(gst_video::VideoInfo::from_caps(&caps).unwrap());
-None
-})
-.unwrap();
+None
+});
 Ok(pipeline)
 }
......
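Both handlers above share one drawing state behind a mutex: "caps-changed" records the negotiated video format, and "draw" reads it on every frame. A reduced sketch of that pattern (the struct is trimmed to the one field used here):

    use std::sync::{Arc, Mutex};

    struct DrawingContext {
        info: Option<gst_video::VideoInfo>,
    }

    let drawer = Arc::new(Mutex::new(DrawingContext { info: None }));
    let drawer_clone = Arc::clone(&drawer);
    overlay.connect("caps-changed", false, move |args| {
        let caps = args[1].get::<gst::Caps>().unwrap();
        let mut drawer = drawer_clone.lock().unwrap();
        drawer.info = Some(gst_video::VideoInfo::from_caps(&caps).unwrap());
        None // "caps-changed" has no return value
    });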
@@ -83,7 +83,7 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
 .field("width", 800i32)
 .field("height", 800i32)
 .build();
-capsfilter.set_property("caps", &caps).unwrap();
+capsfilter.set_property("caps", &caps);
 // The videotestsrc supports multiple test patterns. In this example, we will use the
 // pattern with a white ball moving around the video's center point.
@@ -125,76 +125,74 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
 // passed as an array of glib::Value. For documentation about the actual arguments
 // it is always a good idea to check the element's signals using gst-inspect,
 // or the online documentation.
-overlay
-.connect("draw", false, move |args| {
-use std::f64::consts::PI;
-let drawer = &drawer_clone;
-let drawer = drawer.lock().unwrap();
-// Get the signal's arguments
-let _overlay = args[0].get::<gst::Element>().unwrap();
-// This is the cairo context. This is the root of all of cairo's
-// drawing functionality.
-let cr = args[1].get::<cairo::Context>().unwrap();
-let timestamp = args[2].get::<gst::ClockTime>().unwrap();
-let _duration = args[3].get::<gst::ClockTime>().unwrap();
-let info = drawer.info.as_ref().unwrap();
-let layout = drawer.layout.borrow();
-let angle = 2.0 * PI * (timestamp % (10 * gst::ClockTime::SECOND)).nseconds() as f64
-/ (10.0 * gst::ClockTime::SECOND.nseconds() as f64);
-// The image we draw (the text) will be static, but we will change the
-// transformation on the drawing context, which rotates and shifts everything
-// that we draw afterwards. Like this, we have no complicated calculations
-// in the actual drawing below.
-// Calling multiple transformation methods after each other will apply the
-// new transformation on top. If you repeat the cr.rotate(angle) line below
-// this a second time, everything in the canvas will rotate twice as fast.
-cr.translate(
-f64::from(info.width()) / 2.0,
-f64::from(info.height()) / 2.0,
+overlay.connect("draw", false, move |args| {
+use std::f64::consts::PI;
+let drawer = &drawer_clone;
+let drawer = drawer.lock().unwrap();
+// Get the signal's arguments
+let _overlay = args[0].get::<gst::Element>().unwrap();
+// This is the cairo context. This is the root of all of cairo's
+// drawing functionality.
+let cr = args[1].get::<cairo::Context>().unwrap();
+let timestamp = args[2].get::<gst::ClockTime>().unwrap();
+let _duration = args[3].get::<gst::ClockTime>().unwrap();
+let info = drawer.info.as_ref().unwrap();
+let layout = drawer.layout.borrow();
+let angle = 2.0 * PI * (timestamp % (10 * gst::ClockTime::SECOND)).nseconds() as f64
+/ (10.0 * gst::ClockTime::SECOND.nseconds() as f64);
+// The image we draw (the text) will be static, but we will change the
+// transformation on the drawing context, which rotates and shifts everything
+// that we draw afterwards. Like this, we have no complicated calculations
+// in the actual drawing below.
+// Calling multiple transformation methods after each other will apply the
+// new transformation on top. If you repeat the cr.rotate(angle) line below
+// this a second time, everything in the canvas will rotate twice as fast.
+cr.translate(
+f64::from(info.width()) / 2.0,
+f64::from(info.height()) / 2.0,
+);
-cr.rotate(angle);
-// This loop will render 10 times the string "GStreamer" in a circle
-for i in 0..10 {
-// Cairo, like most rendering frameworks, is using a stack for transformations
-// with this, we push our current transformation onto this stack - allowing us
-// to make temporary changes / render something / and then returning to the
-// previous transformations.
-cr.save().expect("Failed to save state");
-let angle = (360. * f64::from(i)) / 10.0;
-let red = (1.0 + f64::cos((angle - 60.0) * PI / 180.0)) / 2.0;
-cr.set_source_rgb(red, 0.0, 1.0 - red);
-cr.rotate(angle * PI / 180.0);
-// Update the text layout. This function is only updating pango's internal state.
-// So e.g. that after a 90 degree rotation it knows that what was previously going
-// to end up as a 200x100 rectangle would now be 100x200.
-pangocairo::functions::update_layout(&cr, &**layout);
-let (width, _height) = layout.size();
-// Using width and height of the text, we can properly position it within
-// our canvas.
-cr.move_to(
--(f64::from(width) / f64::from(pango::SCALE)) / 2.0,
--(f64::from(info.height())) / 2.0,
-);
+cr.rotate(angle);
+// This loop will render 10 times the string "GStreamer" in a circle
+for i in 0..10 {
+// Cairo, like most rendering frameworks, is using a stack for transformations
+// with this, we push our current transformation onto this stack - allowing us
+// to make temporary changes / render something / and then returning to the
+// previous transformations.
+cr.save().expect("Failed to save state");
+let angle = (360. * f64::from(i)) / 10.0;
+let red = (1.0 + f64::cos((angle - 60.0) * PI / 180.0)) / 2.0;
+cr.set_source_rgb(red, 0.0, 1.0 - red);
+cr.rotate(angle * PI / 180.0);
+// Update the text layout. This function is only updating pango's internal state.
+// So e.g. that after a 90 degree rotation it knows that what was previously going
+// to end up as a 200x100 rectangle would now be 100x200.
+pangocairo::functions::update_layout(&cr, &**layout);
+let (width, _height) = layout.size();
+// Using width and height of the text, we can properly position it within
+// our canvas.
+cr.move_to(
+-(f64::from(width) / f64::from(pango::SCALE)) / 2.0,
+-(f64::from(info.height())) / 2.0,
+);
-// After telling the layout object where to draw itself, we actually tell
-// it to draw itself into our cairo context.
-pangocairo::functions::show_layout(&cr, &**layout);
-// Here we go one step up in our stack of transformations, removing any
-// changes we did to them since the last call to cr.save();
-cr.restore().expect("Failed to restore state");
-}
+// After telling the layout object where to draw itself, we actually tell
+// it to draw itself into our cairo context.
+pangocairo::functions::show_layout(&cr, &**layout);
+// Here we go one step up in our stack of transformations, removing any
+// changes we did to them since the last call to cr.save();
+cr.restore().expect("Failed to restore state");
+}
-None
-})
-.unwrap();
+None
+});
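In this cairooverlay variant the signal hands over the cairo context and the timing directly. The angle formula above maps the pipeline clock onto a 10-second rotation; for example, at timestamp 2.5 s the phase is (2.5 s % 10 s) / 10 s = 0.25, so the angle is 2π · 0.25 = π/2. A sketch of the argument extraction, taken from the hunk above:

    let _overlay = args[0].get::<gst::Element>().unwrap();
    // The cairo context to draw into, owned by the element for this frame.
    let cr = args[1].get::<cairo::Context>().unwrap();
    let timestamp = args[2].get::<gst::ClockTime>().unwrap();
    let _duration = args[3].get::<gst::ClockTime>().unwrap();
    // Fraction of the current 10 s cycle, in [0, 1):
    let phase = (timestamp % (10 * gst::ClockTime::SECOND)).nseconds() as f64
        / (10.0 * gst::ClockTime::SECOND.nseconds() as f64);
    let angle = 2.0 * std::f64::consts::PI * phase;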
 // Add a signal handler to the overlay's "caps-changed" signal. This could e.g.
 // be called when the sink that we render to does not support resizing the image
@@ -203,17 +201,15 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
 // resize our canvas's size.
 // Another possibility for when this might happen is, when our video is a network
 // stream that dynamically changes resolution when enough bandwidth is available.
-overlay
-.connect("caps-changed", false, move |args| {
-let _overlay = args[0].get::<gst::Element>().unwrap();
-let caps = args[1].get::<gst::Caps>().unwrap();
+overlay.connect("caps-changed", false, move |args| {
+let _overlay = args[0].get::<gst::Element>().unwrap();
+let caps = args[1].get::<gst::Caps>().unwrap();
-let mut drawer = drawer.lock().unwrap();
-drawer.info = Some(gst_video::VideoInfo::from_caps(&caps).unwrap());
+let mut drawer = drawer.lock().unwrap();
+drawer.info = Some(gst_video::VideoInfo::from_caps(&caps).unwrap());
-None
-})
-.unwrap();
+None
+});
 Ok(pipeline)
 }
......
@@ -29,7 +29,7 @@ fn example_main() {
 // Create a new playbin element, and tell it what uri to play back.
 let playbin = gst::ElementFactory::make("playbin", None).unwrap();
-playbin.set_property("uri", uri).unwrap();
+playbin.set_property("uri", uri);
 // For flags handling
 // With flags, one can configure playbin's behavior such as whether it
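playbin is driven almost entirely through properties and signals, so the same panicking setter applies. A minimal sketch of starting playback with the new API (the URI is a placeholder):

    let playbin = gst::ElementFactory::make("playbin", None).unwrap();
    // "uri" is a plain string property; set_property panics on error now.
    playbin.set_property("uri", "https://example.org/stream.ogg");
    // State changes still report failure through a Result.
    playbin.set_state(gst::State::Playing).unwrap();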
@@ -50,53 +50,48 @@ fn example_main() {
 // - Live streams (such as internet radios) update this metadata during the stream
 // Note that this signal will be emitted from the streaming threads usually,
 // not the application's threads!
-playbin
-.connect("audio-tags-changed", false, |values| {
-// The metadata of any of the contained audio streams changed
-// In the case of a live-stream from an internet radio, this could for example
-// mark the beginning of a new track, or a new DJ.
-let playbin = values[0]
-.get::<glib::Object>()
-.expect("playbin \"audio-tags-changed\" signal values[0]");
-// This gets the index of the stream that changed. This is necessary, since
-// there could e.g. be multiple audio streams (english, spanish, ...).
-let idx = values[1]
-.get::<i32>()
-.expect("playbin \"audio-tags-changed\" signal values[1]");
-println!("audio tags of audio stream {} changed:", idx);
-// HELP: is this correct?
-// We were only notified about the change of metadata. If we want to do
-// something with it, we first need to actually query the metadata from the playbin.
-// We do this by facilitating the get-audio-tags action-signal on playbin.
-// Sending an action-signal to an element essentially is a function call on the element.
-// It is done that way, because elements do not have their own function API, they are
-// relying on GStreamer and GLib's API. The only way an element can communicate with an
-// application is via properties, signals or action signals (or custom messages, events, queries).
-// So what the following code does, is essentially asking playbin to tell us its already
-// internally stored tag list for this stream index.
-let tags = playbin
-.emit_by_name("get-audio-tags", &[&idx])
-.unwrap()
-.unwrap();
-let tags = tags.get::<gst::TagList>().expect
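For reference, the action-signal call this comment describes looks roughly like the following once the tag list is unwrapped (a sketch reconstructed from the deleted lines above; the Artist accessor is an assumption based on gst::tags):

    // Ask playbin for the tag list of stream `idx` via an action signal.
    // Old API: emit_by_name returned Result<Option<glib::Value>, _>,
    // hence the double unwrap before the typed get().
    let tags = playbin
        .emit_by_name("get-audio-tags", &[&idx])
        .unwrap()
        .unwrap()
        .get::<gst::TagList>()
        .expect("get-audio-tags did not return a TagList");
    if let Some(artist) = tags.get::<gst::tags::Artist>() {
        println!("  Artist: {}", artist.get());
    }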