// This example demonstrates how to output GL textures, within an
// EGL/X11 context provided by the application, and render those
// textures in the GL application.

// {videotestsrc} - { glsinkbin }
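//
// The application's GL context is wrapped and handed to the pipeline through
// the bus sync handler, and the appsink placed inside glsinkbin delivers RGBA
// textures in GL memory, which this application then renders itself.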

#[macro_use]
extern crate gstreamer as gst;
use gst::prelude::*;

extern crate gstreamer_app as gst_app;
extern crate gstreamer_gl as gst_gl;
use gst_gl::prelude::*;
extern crate gstreamer_video as gst_video;

extern crate glib;

use std::error::Error as StdError;
use std::ffi::CStr;
use std::mem;
use std::ptr;
use std::sync::mpsc;

extern crate failure;
use failure::Error;

#[macro_use]
extern crate failure_derive;

extern crate glutin;

#[path = "../examples-common.rs"]
mod examples_common;

#[derive(Debug, Fail)]
#[fail(display = "Missing element {}", _0)]
struct MissingElement(&'static str);

#[derive(Debug, Fail)]
#[fail(
    display = "Received error from {}: {} (debug: {:?})",
    src, error, debug
)]
struct ErrorMessage {
    src: String,
    error: String,
    debug: Option<String>,
    #[cause]
    cause: glib::Error,
}

// Vertex data for a fullscreen quad: x, y, z position followed by the u, v
// texture coordinate of each corner.
#[rustfmt::skip]
static VERTICES: [f32; 20] = [
     1.0,  1.0, 0.0, 1.0, 0.0,
    -1.0,  1.0, 0.0, 0.0, 0.0,
    -1.0, -1.0, 0.0, 0.0, 1.0,
     1.0, -1.0, 0.0, 1.0, 1.0,
];

static INDICES: [u16; 6] = [0, 1, 2, 0, 2, 3];

// Identity matrix used as the u_transformation uniform.
#[rustfmt::skip]
static IDENTITY: [f32; 16] = [
    1.0, 0.0, 0.0, 0.0,
    0.0, 1.0, 0.0, 0.0,
    0.0, 0.0, 1.0, 0.0,
    0.0, 0.0, 0.0, 1.0,
];

// Vertex shader: applies u_transformation to the vertex position and passes
// the texture coordinate through. NUL-terminated for the GL API.
const VS_SRC: &[u8] = b"
uniform mat4 u_transformation;
attribute vec4 a_position;
attribute vec2 a_texcoord;
varying vec2 v_texcoord;

void main() {
    gl_Position = u_transformation * a_position;
    v_texcoord = a_texcoord;
}
\0";

// Fragment shader: samples the uploaded video texture.
const FS_SRC: &[u8] = b"
#ifdef GL_ES
precision mediump float;
#endif
varying vec2 v_texcoord;
uniform sampler2D tex;

void main() {
    gl_FragColor = texture2D(tex, v_texcoord);
}
\0";

// GL bindings are generated at build time and included from OUT_DIR below.
#[allow(clippy::unreadable_literal)]
#[allow(clippy::unused_unit)]
#[allow(clippy::too_many_arguments)]
mod gl {
    pub use self::Gles2 as Gl;
    include!(concat!(env!("OUT_DIR"), "/test_gl_bindings.rs"));
}

// GL state used by the render loop: the compiled shader program, its attribute
// locations and the buffers describing the fullscreen quad.
struct Gl {
    gl: gl::Gl,
    program: gl::types::GLuint,
    attr_position: gl::types::GLint,
    attr_texture: gl::types::GLint,
    vao: gl::types::GLuint,
    vertex_buffer: gl::types::GLuint,
    vbo_indices: gl::types::GLuint,
}

impl Gl {
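    // Draw the given texture onto a fullscreen quad using the shader program
    // and vertex/index buffers that were created in load().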
    fn draw_frame(&self, texture_id: gl::types::GLuint) {
        unsafe {
            // render
            self.gl.ClearColor(0.0, 0.0, 0.0, 1.0);
            self.gl.Clear(gl::COLOR_BUFFER_BIT);

            self.gl.BlendColor(0.0, 0.0, 0.0, 1.0);
            if self.gl.BlendFuncSeparate.is_loaded() {
                self.gl.BlendFuncSeparate(
                    gl::SRC_ALPHA,
                    gl::CONSTANT_COLOR,
                    gl::ONE,
                    gl::ONE_MINUS_SRC_ALPHA,
                );
            } else {
                self.gl.BlendFunc(gl::SRC_ALPHA, gl::CONSTANT_COLOR);
            }
            self.gl.BlendEquation(gl::FUNC_ADD);
            self.gl.Enable(gl::BLEND);

            self.gl.UseProgram(self.program);

            if self.gl.BindVertexArray.is_loaded() {
                self.gl.BindVertexArray(self.vao);
            }

            {
                self.gl
                    .BindBuffer(gl::ELEMENT_ARRAY_BUFFER, self.vbo_indices);
                self.gl.BindBuffer(gl::ARRAY_BUFFER, self.vertex_buffer);

                // Load the vertex position
                self.gl.VertexAttribPointer(
                    self.attr_position as gl::types::GLuint,
                    3,
                    gl::FLOAT,
                    gl::FALSE,
                    (5 * mem::size_of::<f32>()) as gl::types::GLsizei,
                    ptr::null(),
                );

                // Load the texture coordinate
                self.gl.VertexAttribPointer(
                    self.attr_texture as gl::types::GLuint,
                    2,
                    gl::FLOAT,
                    gl::FALSE,
                    (5 * mem::size_of::<f32>()) as gl::types::GLsizei,
                    (3 * mem::size_of::<f32>()) as *const () as *const _,
                );

                self.gl.EnableVertexAttribArray(self.attr_position as _);
                self.gl.EnableVertexAttribArray(self.attr_texture as _);
            }

            self.gl.ActiveTexture(gl::TEXTURE0);
            self.gl.BindTexture(gl::TEXTURE_2D, texture_id);

            let location = self
                .gl
                .GetUniformLocation(self.program, b"tex\0".as_ptr() as *const _);
            self.gl.Uniform1i(location, 0);

            let location = self
                .gl
                .GetUniformLocation(self.program, b"u_transformation\0".as_ptr() as *const _);
            self.gl
                .UniformMatrix4fv(location, 1, gl::FALSE, IDENTITY.as_ptr() as *const _);

            self.gl
                .DrawElements(gl::TRIANGLES, 6, gl::UNSIGNED_SHORT, ptr::null());

            self.gl.BindTexture(gl::TEXTURE_2D, 0);
            self.gl.UseProgram(0);

            if self.gl.BindVertexArray.is_loaded() {
                self.gl.BindVertexArray(0);
            }

            {
                self.gl.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, 0);
                self.gl.BindBuffer(gl::ARRAY_BUFFER, 0);

                self.gl.DisableVertexAttribArray(self.attr_position as _);
                self.gl.DisableVertexAttribArray(self.attr_texture as _);
            }
        }
    }

    fn resize(&self, size: glutin::dpi::PhysicalSize) {
        unsafe {
            self.gl
                .Viewport(0, 0, size.width as i32, size.height as i32);
        }
    }
}

// Load the GL function pointers through glutin and create the shader program,
// vertex/index buffers and, when supported, the vertex array object.
fn load(gl_context: &glutin::WindowedContext<glutin::PossiblyCurrent>) -> Gl {
    let gl = gl::Gl::load_with(|ptr| gl_context.get_proc_address(ptr) as *const _);

    let version = unsafe {
        let data = CStr::from_ptr(gl.GetString(gl::VERSION) as *const _)
            .to_bytes()
            .to_vec();
        String::from_utf8(data).unwrap()
    };

    println!("OpenGL version {}", version);

    let (program, attr_position, attr_texture, vao, vertex_buffer, vbo_indices) = unsafe {
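        // Compile the vertex and fragment shaders, link them into a program and
        // look up the attribute locations.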
        let vs = gl.CreateShader(gl::VERTEX_SHADER);
        gl.ShaderSource(vs, 1, [VS_SRC.as_ptr() as *const _].as_ptr(), ptr::null());
        gl.CompileShader(vs);

        let fs = gl.CreateShader(gl::FRAGMENT_SHADER);
        gl.ShaderSource(fs, 1, [FS_SRC.as_ptr() as *const _].as_ptr(), ptr::null());
        gl.CompileShader(fs);

        let program = gl.CreateProgram();
        gl.AttachShader(program, vs);
        gl.AttachShader(program, fs);
        gl.LinkProgram(program);

        {
            let mut success: gl::types::GLint = 1;
            gl.GetProgramiv(program, gl::LINK_STATUS, &mut success);
            assert!(success != 0);
        }

        let attr_position = gl.GetAttribLocation(program, b"a_position\0".as_ptr() as *const _);
        let attr_texture = gl.GetAttribLocation(program, b"a_texcoord\0".as_ptr() as *const _);

        let mut vao = mem::uninitialized();
        if gl.BindVertexArray.is_loaded() {
            gl.GenVertexArrays(1, &mut vao);
            gl.BindVertexArray(vao);
        }

        let mut vertex_buffer = mem::uninitialized();
        gl.GenBuffers(1, &mut vertex_buffer);
        gl.BindBuffer(gl::ARRAY_BUFFER, vertex_buffer);
        gl.BufferData(
            gl::ARRAY_BUFFER,
            (VERTICES.len() * mem::size_of::<f32>()) as gl::types::GLsizeiptr,
            VERTICES.as_ptr() as *const _,
            gl::STATIC_DRAW,
        );

        let mut vbo_indices = mem::uninitialized();
        gl.GenBuffers(1, &mut vbo_indices);
        gl.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, vbo_indices);
        gl.BufferData(
            gl::ELEMENT_ARRAY_BUFFER,
            (INDICES.len() * mem::size_of::<u16>()) as gl::types::GLsizeiptr,
            INDICES.as_ptr() as *const _,
            gl::STATIC_DRAW,
        );

        if gl.BindVertexArray.is_loaded() {
            gl.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, vbo_indices);
            gl.BindBuffer(gl::ARRAY_BUFFER, vertex_buffer);

            // Load the vertex position
            gl.VertexAttribPointer(
                attr_position as gl::types::GLuint,
                3,
                gl::FLOAT,
                gl::FALSE,
                (5 * mem::size_of::<f32>()) as gl::types::GLsizei,
                ptr::null(),
            );

            // Load the texture coordinate
            gl.VertexAttribPointer(
                attr_texture as gl::types::GLuint,
                2,
                gl::FLOAT,
                gl::FALSE,
                (5 * mem::size_of::<f32>()) as gl::types::GLsizei,
                (3 * mem::size_of::<f32>()) as *const () as *const _,
            );

            gl.EnableVertexAttribArray(attr_position as _);
            gl.EnableVertexAttribArray(attr_texture as _);

            gl.BindVertexArray(0);
        }

        gl.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, 0);
        gl.BindBuffer(gl::ARRAY_BUFFER, 0);

        (
            program,
            attr_position,
            attr_texture,
            vao,
            vertex_buffer,
            vbo_indices,
        )
    };

    Gl {
        gl,
        program,
        attr_position,
        attr_texture,
        vao,
        vertex_buffer,
        vbo_indices,
    }
}

// Everything the render loop needs: the pipeline with its appsink and glupload
// element, the glutin window and event loop, and the wrapped GL context that
// is shared with GStreamer.
struct App {
    pipeline: gst::Pipeline,
    appsink: gst_app::AppSink,
    glupload: gst::Element,
    bus: gst::Bus,
    events_loop: glutin::EventsLoop,
    windowed_context: glutin::WindowedContext<glutin::PossiblyCurrent>,
    shared_context: gst_gl::GLContext,
}

impl App {
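    // Initialize GStreamer, build the pipeline, create the glutin window and
    // wrap its GL context so it can be shared with the pipeline via the bus
    // sync handler installed below.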
    fn new() -> Result<App, Error> {
        gst::init()?;

        let (pipeline, appsink, glupload) = App::create_pipeline()?;
        let bus = pipeline
            .get_bus()
            .expect("Pipeline without bus. Shouldn't happen!");

        let events_loop = glutin::EventsLoop::new();
        let window = glutin::WindowBuilder::new().with_title("GL rendering");
        let windowed_context = glutin::ContextBuilder::new()
            .with_vsync(true)
            .build_windowed(window, &events_loop)?;

        let windowed_context = unsafe { windowed_context.make_current().map_err(|(_, err)| err)? };

        #[cfg(any(feature = "gl-x11", feature = "gl-wayland"))]
        let inner_window = windowed_context.window();

        let shared_context: gst_gl::GLContext;
        if cfg!(target_os = "linux") {
            use glutin::os::unix::RawHandle;
            #[cfg(any(feature = "gl-x11", feature = "gl-wayland"))]
            use glutin::os::unix::WindowExt;
            use glutin::os::ContextTraitExt;

            let api = App::map_gl_api(windowed_context.get_api());

            // Determine which GL platform glutin created the context on (EGL or
            // GLX) and build the matching GStreamer GL display for it.
            let (gl_context, gl_display, platform) = match unsafe { windowed_context.raw_handle() }
            {
                #[cfg(any(feature = "gl-egl", feature = "gl-wayland"))]
                RawHandle::Egl(egl_context) => {
                    #[cfg(feature = "gl-egl")]
                    let gl_display =
                        if let Some(display) = unsafe { windowed_context.get_egl_display() } {
                            unsafe { gst_gl::GLDisplayEGL::new_with_egl_display(display as usize) }
                                .unwrap()
                        } else {
                            panic!("EGL context without EGL display");
                        };

                    #[cfg(not(feature = "gl-egl"))]
                    let gl_display = if let Some(display) = inner_window.get_wayland_display() {
                        unsafe { gst_gl::GLDisplayWayland::new_with_display(display as usize) }
                            .unwrap()
                    } else {
                        panic!("Wayland window without Wayland display");
                    };

                    (
                        egl_context as usize,
                        gl_display.upcast::<gst_gl::GLDisplay>(),
                        gst_gl::GLPlatform::EGL,
                    )
                }
                #[cfg(feature = "gl-x11")]
                RawHandle::Glx(glx_context) => {
                    let gl_display = if let Some(display) = inner_window.get_xlib_display() {
                        unsafe { gst_gl::GLDisplayX11::new_with_display(display as usize) }.unwrap()
                    } else {
                        panic!("X11 window without X Display");
                    };

                    (
                        glx_context as usize,
                        gl_display.upcast::<gst_gl::GLDisplay>(),
                        gst_gl::GLPlatform::GLX,
                    )
                }
                handler => panic!("Unsupported platform: {:?}.", handler),
            };

            // Wrap the application's GL context so that GStreamer can use it and
            // share GL resources with the application.
            shared_context =
                unsafe { gst_gl::GLContext::new_wrapped(&gl_display, gl_context, platform, api) }
                    .unwrap();

            shared_context
                .activate(true)
                .expect("Couldn't activate wrapped GL context");

            shared_context.fill_info()?;

            let gl_context = shared_context.clone();
            let events_proxy = events_loop.create_proxy();

            // Answer context requests from the pipeline: hand out the GStreamer
            // GL display and our wrapped application GL context.
            #[allow(clippy::single_match)]
            bus.set_sync_handler(move |_, msg| {
                match msg.view() {
                    gst::MessageView::NeedContext(ctxt) => {
                        let context_type = ctxt.get_context_type();
                        if context_type == *gst_gl::GL_DISPLAY_CONTEXT_TYPE {
                            if let Some(el) =
                                msg.get_src().map(|s| s.downcast::<gst::Element>().unwrap())
                            {
                                let context = gst::Context::new(context_type, true);
                                context.set_gl_display(&gl_display);
                                el.set_context(&context);
                            }
                        }
                        if context_type == "gst.gl.app_context" {
                            if let Some(el) =
                                msg.get_src().map(|s| s.downcast::<gst::Element>().unwrap())
                            {
                                let mut context = gst::Context::new(context_type, true);
                                {
                                    let context = context.get_mut().unwrap();
                                    let s = context.get_mut_structure();
                                    s.set_value("context", gl_context.to_send_value());
                                }
                                el.set_context(&context);
                            }
                        }
                    }
                    _ => (),
                }

                // Wake up the events loop so pending bus messages get processed.
                let _ = events_proxy.wakeup();

                gst::BusSyncReply::Pass
            });
        } else {
            panic!("This example only has Linux support");
        }

        Ok(App {
            pipeline,
            appsink,
            glupload,
            bus,
            events_loop,
            windowed_context,
            shared_context,
        })
    }

    // Install the appsink callbacks and start the pipeline. New samples are
    // handed to the caller through the returned channel.
    fn setup(
        &self,
        events_loop: &glutin::EventsLoop,
    ) -> Result<mpsc::Receiver<gst::Sample>, Error> {
        let events_proxy = events_loop.create_proxy();
        let (sender, receiver) = mpsc::channel();
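        // The new_sample callback runs on a GStreamer streaming thread: it only
        // forwards the sample into the channel and wakes up the event loop; all
        // GL work happens in main_loop().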
        self.appsink.set_callbacks(
            gst_app::AppSinkCallbacks::new()
                .new_sample(move |appsink| {
                    let sample = appsink.pull_sample().ok_or(gst::FlowError::Eos)?;

                    {
                        let _buffer = sample.get_buffer().ok_or_else(|| {
                            gst_element_error!(
                                appsink,
                                gst::ResourceError::Failed,
                                ("Failed to get buffer from appsink")
                            );

                            gst::FlowError::Error
                        })?;

                        let _info = sample
                            .get_caps()
                            .and_then(|caps| gst_video::VideoInfo::from_caps(caps))
                            .ok_or_else(|| {
                                gst_element_error!(
                                    appsink,
                                    gst::ResourceError::Failed,
                                    ("Failed to get video info from sample")
                                );

                                gst::FlowError::Error
                            })?;
                    }

                    sender
                        .send(sample)
                        .map(|_| gst::FlowSuccess::Ok)
                        .map_err(|_| gst::FlowError::Error)?;

                    let _ = events_proxy.wakeup();

                    Ok(gst::FlowSuccess::Ok)
                })
                .build(),
        );

        self.pipeline.set_state(gst::State::Playing)?;

        Ok(receiver)
    }

    fn map_gl_api(api: glutin::Api) -> gst_gl::GLAPI {
        match api {
            glutin::Api::OpenGl => gst_gl::GLAPI::OPENGL3,
            glutin::Api::OpenGlEs => gst_gl::GLAPI::GLES2,
            _ => gst_gl::GLAPI::NONE,
        }
    }

    // Build videotestsrc ! glsinkbin{ appsink } and return the pipeline, the
    // appsink and the glupload element found inside glsinkbin.
    fn create_pipeline() -> Result<(gst::Pipeline, gst_app::AppSink, gst::Element), Error> {
        let pipeline = gst::Pipeline::new(None);
        let src = gst::ElementFactory::make("videotestsrc", None)
            .ok_or(MissingElement("videotestsrc"))?;
        let sink =
            gst::ElementFactory::make("glsinkbin", None).ok_or(MissingElement("glsinkbin"))?;

        pipeline.add_many(&[&src, &sink])?;
        src.link(&sink)?;

        let appsink = gst::ElementFactory::make("appsink", None)
            .ok_or(MissingElement("appsink"))?
            .dynamic_cast::<gst_app::AppSink>()
            .expect("Sink element is expected to be an appsink!");

        sink.set_property("sink", &appsink)?;

        appsink.set_property("enable-last-sample", &false.to_value())?;
        appsink.set_property("emit-signals", &false.to_value())?;
        appsink.set_property("max-buffers", &1u32.to_value())?;

        // Request RGBA video in GL memory so that the appsink receives GL textures.
        let caps = gst::Caps::builder("video/x-raw")
            .features(&[&gst_gl::CAPS_FEATURE_MEMORY_GL_MEMORY])
            .field("format", &gst_video::VideoFormat::Rgba.to_string())
            .field("texture-target", &"2D")
            .build();
        appsink.set_caps(Some(&caps));

        // Get the glupload element so we can later extract the GL context it uses.
        let mut iter = sink.dynamic_cast::<gst::Bin>().unwrap().iterate_elements();
        let glupload = loop {
            match iter.next() {
                Ok(Some(element)) => {
                    if "glupload" == element.get_factory().unwrap().get_name() {
                        break Some(element);
                    }
                }
                Err(gst::IteratorError::Resync) => iter.resync(),
                _ => break None,
            }
        };

        Ok((pipeline, appsink, glupload.unwrap()))
    }

    // Drain all pending bus messages, turning any error message into an Err.
    fn handle_messages(bus: &gst::Bus) -> Result<(), Error> {
        use gst::MessageView;

        for msg in bus.iter() {
            match msg.view() {
                MessageView::Eos(..) => break,
                MessageView::Error(err) => {
                    Err(ErrorMessage {
                        src: msg
                            .get_src()
                            .map(|s| String::from(s.get_path_string()))
                            .unwrap_or_else(|| String::from("None")),
                        error: err.get_error().description().into(),
                        debug: err.get_debug().map(|d| d.to_string()),
                        cause: err.get_error(),
                    })?;
                }
                _ => (),
            }
        }

        Ok(())
    }

    fn into_context(self: App) -> glutin::WindowedContext<glutin::PossiblyCurrent> {
        self.windowed_context
    }
}

// Run the glutin event loop: handle window events and bus messages, pull the
// most recent sample from the appsink and draw it.
fn main_loop(mut app: App) -> Result<glutin::WindowedContext<glutin::PossiblyCurrent>, Error> {
    println!(
        "Pixel format of the window's GL context {:?}",
        app.windowed_context.get_pixel_format()
    );

    let gl = load(&app.windowed_context);

    let receiver = app.setup(&app.events_loop)?;

    let mut curr_frame: Option<gst_video::VideoFrame<gst_video::video_frame::Readable>> = None;
    let mut running = true;
    let mut gst_gl_context: Option<gst_gl::GLContext> = None;
    let events_loop = &mut app.events_loop;
    let windowed_context = &mut app.windowed_context;
    let bus = &app.bus;

    while running {
        #[allow(clippy::single_match)]
        events_loop.poll_events(|event| match event {
            glutin::Event::WindowEvent { event, .. } => match event {
                glutin::WindowEvent::CloseRequested => running = false,
                glutin::WindowEvent::Resized(logical_size) => {
                    let dpi_factor = windowed_context.window().get_hidpi_factor();
                    windowed_context.resize(logical_size.to_physical(dpi_factor));
                    gl.resize(logical_size.to_physical(dpi_factor));
                }
                _ => (),
            },
            _ => (),
        });

        // Handle all pending bus messages. Whenever a message arrives, the
        // sync handler installed in App::new() wakes up the events loop above.
        App::handle_messages(&bus)?;

        // Pick up only the most recent sample waiting in the channel.
        if let Some(sample) = receiver.try_iter().last() {
            let buffer = sample.get_buffer().unwrap();
            let info = sample
                .get_caps()
                .and_then(|caps| gst_video::VideoInfo::from_caps(caps))
                .unwrap();

            {
                // On the first sample, fetch the GL context that glupload created
                // inside the pipeline; it is needed to set sync points on buffers.
                if gst_gl_context.is_none() {
                    gst_gl_context = app
                        .glupload
                        .get_property("context")
                        .unwrap()
                        .get::<gst_gl::GLContext>();
                }

                // Insert a sync point on the pipeline's GL context so that we can
                // later wait for the texture upload before using it.
                let sync_meta = buffer.get_meta::<gst_gl::GLSyncMeta>().unwrap();
                sync_meta.set_sync_point(gst_gl_context.as_ref().unwrap());
            }

            if let Ok(frame) =
                gst_video::VideoFrame::from_buffer_readable_gl(buffer.to_owned(), &info)
            {
                curr_frame = Some(frame);
            }
        }

        if let Some(frame) = curr_frame.as_ref() {
            let sync_meta = frame.buffer().get_meta::<gst_gl::GLSyncMeta>().unwrap();
            // Wait on our shared context for the producer's GL work to complete
            // before sampling the texture.
            sync_meta.wait(&app.shared_context);
            if let Some(texture) = frame.get_texture_id(0) {
                gl.draw_frame(texture as gl::types::GLuint);
            }
        }
        windowed_context.swap_buffers()?;
    }

    app.pipeline.send_event(gst::Event::new_eos().build());
    app.pipeline.set_state(gst::State::Null)?;

    Ok(app.into_context())
}

fn cleanup(
    _windowed_context: glutin::WindowedContext<glutin::PossiblyCurrent>,
) -> Result<(), failure::Error> {
    // To ensure that the context stays alive longer than the pipeline or any reference
    // inside GStreamer to the GL context, its display or anything else. See
    // https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/issues/196
    //
    // We might do any window/GL specific cleanup here as needed.

    Ok(())
}

fn example_main() {
    match App::new().and_then(main_loop).and_then(cleanup) {
        Ok(r) => r,
        Err(e) => eprintln!("Error! {}", e),
    }
}

fn main() {
    examples_common::run(example_main);
}