Commit 6eda6443 authored by Josh Matthews, committed by Sebastian Dröge

Add device provider for AVFoundation capture devices.

parent fffb2aa1
Pipeline #18230 passed with stages
in 22 minutes and 26 seconds
......@@ -109,6 +109,7 @@ if HAVE_AVFOUNDATION
libgstapplemedia_la_SOURCES += \
avfvideosrc.m \
avfdeviceprovider.m \
avfassetsrc.m \
avsamplevideosink.m
......
/* GStreamer
* Copyright (C) 2019 Josh Matthews <josh@joshmatthews.net>
*
* avfdeviceprovider.h: AVF device probing and monitoring
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_AVF_DEVICE_PROVIDER_H__
#define __GST_AVF_DEVICE_PROVIDER_H__

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <gst/gst.h>

#include "avfvideosrc.h"

G_BEGIN_DECLS

typedef struct _GstAVFDeviceProvider GstAVFDeviceProvider;
typedef struct _GstAVFDeviceProviderPrivate GstAVFDeviceProviderPrivate;
typedef struct _GstAVFDeviceProviderClass GstAVFDeviceProviderClass;

#define GST_TYPE_AVF_DEVICE_PROVIDER                 (gst_avf_device_provider_get_type())
#define GST_IS_AVF_DEVICE_PROVIDER(obj)              (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_AVF_DEVICE_PROVIDER))
#define GST_IS_AVF_DEVICE_PROVIDER_CLASS(klass)     (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_AVF_DEVICE_PROVIDER))
#define GST_AVF_DEVICE_PROVIDER_GET_CLASS(obj)       (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_AVF_DEVICE_PROVIDER, GstAVFDeviceProviderClass))
#define GST_AVF_DEVICE_PROVIDER(obj)                 (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_AVF_DEVICE_PROVIDER, GstAVFDeviceProvider))
/* Fixed: cast-check against the provider's own type; the previous
 * GST_TYPE_DEVICE_PROVIDER check would accept any provider subclass. */
#define GST_AVF_DEVICE_PROVIDER_CLASS(klass)         (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_AVF_DEVICE_PROVIDER, GstAVFDeviceProviderClass))
/* Fixed: the struct is spelled GstAVFDeviceProvider; the previous
 * GstAvfDeviceProvider spelling failed to compile wherever the macro
 * was expanded (no such type is declared). */
#define GST_AVF_DEVICE_PROVIDER_CAST(obj)            ((GstAVFDeviceProvider *)(obj))

/* Device provider that probes AVFoundation capture devices. */
struct _GstAVFDeviceProvider {
  GstDeviceProvider parent;
};

/* Kind of device a GstAvfDevice represents; only video sources exist today. */
typedef enum {
  GST_AVF_DEVICE_TYPE_INVALID = 0,
  GST_AVF_DEVICE_TYPE_VIDEO_SOURCE,
} GstAvfDeviceType;

struct _GstAVFDeviceProviderClass {
  GstDeviceProviderClass parent_class;
};

GType gst_avf_device_provider_get_type (void);

typedef struct _GstAvfDevice GstAvfDevice;
typedef struct _GstAvfDevicePrivate GstAvfDevicePrivate;
typedef struct _GstAvfDeviceClass GstAvfDeviceClass;

#define GST_TYPE_AVF_DEVICE                 (gst_avf_device_get_type())
#define GST_IS_AVF_DEVICE(obj)              (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_AVF_DEVICE))
#define GST_IS_AVF_DEVICE_CLASS(klass)     (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_AVF_DEVICE))
#define GST_AVF_DEVICE_GET_CLASS(obj)       (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_AVF_DEVICE, GstAvfDeviceClass))
#define GST_AVF_DEVICE(obj)                 (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_AVF_DEVICE, GstAvfDevice))
/* Fixed: cast-check against GST_TYPE_AVF_DEVICE rather than the generic
 * GST_TYPE_DEVICE. */
#define GST_AVF_DEVICE_CLASS(klass)         (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_AVF_DEVICE, GstAvfDeviceClass))
#define GST_AVF_DEVICE_CAST(obj)            ((GstAvfDevice *)(obj))

/* A single probed AVFoundation capture device. */
struct _GstAvfDevice {
  GstDevice parent;

  GstAvfDeviceType type;        /* what kind of device this is */
  int device_index;             /* index into AVFoundation's device array */
  const gchar *element;         /* factory name used by create_element() */
};

struct _GstAvfDeviceClass {
  GstDeviceClass parent_class;
};

GType gst_avf_device_get_type (void);

G_END_DECLS

#endif /* __GST_AVF_DEVICE_PROVIDER_H__ */
/* GStreamer
* Copyright (C) 2019 Josh Matthews <josh@joshmatthews.net>
*
* avfdeviceprovider.c: AVF device probing and monitoring
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#import <AVFoundation/AVFoundation.h>
#include "avfvideosrc.h"
#include "avfdeviceprovider.h"
#include <string.h>
#include <gst/gst.h>
static GstDevice *gst_avf_device_new (const gchar * device_name, int device_index,
GstCaps * caps, GstAvfDeviceType type);
G_DEFINE_TYPE (GstAVFDeviceProvider, gst_avf_device_provider,
GST_TYPE_DEVICE_PROVIDER);
static GList *gst_avf_device_provider_probe (GstDeviceProvider * provider);
/* Class setup: wire up the static probe and describe the provider. */
static void
gst_avf_device_provider_class_init (GstAVFDeviceProviderClass * klass)
{
  GstDeviceProviderClass *provider_class = GST_DEVICE_PROVIDER_CLASS (klass);

  /* Static probing only for now.
   * TODO: Add start/stop callbacks to receive device notifications.
   * https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/issues/886 */
  provider_class->probe = gst_avf_device_provider_probe;

  gst_device_provider_class_set_static_metadata (provider_class,
      "AVF Device Provider", "Source/Video",
      "List and provide AVF source devices",
      "Josh Matthews <josh@joshmatthews.net>");
}
/* Instance init: nothing to set up — all work happens on demand in probe(). */
static void
gst_avf_device_provider_init (GstAVFDeviceProvider * self)
{
}
/* Probe vfunc: enumerate AVFoundation video capture devices and return a
 * list of floating-ref-sunk GstDevice objects in device-index order. */
static GList *
gst_avf_device_provider_probe (GstDeviceProvider * provider)
{
  GList *result = NULL;

  NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
  /* A throwaway output object, only used to query the supported pixel
   * formats for caps generation.
   * NOTE(review): if this file is built without ARC this object is leaked
   * each probe — confirm the build flags and add a release if needed. */
  AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
  /* NSUInteger index avoids a signed/unsigned comparison with [devices count] */
  for (NSUInteger i = 0; i < [devices count]; i++) {
    AVCaptureDevice *device = [devices objectAtIndex:i];
    g_assert (device != nil);

    GstCaps *caps = gst_av_capture_device_get_caps (device, output,
        GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT);
    const gchar *deviceName = [[device localizedName] UTF8String];
    GstDevice *gst_device = gst_avf_device_new (deviceName, (int) i, caps,
        GST_AVF_DEVICE_TYPE_VIDEO_SOURCE);
    /* "caps" is a boxed construct property: g_object_new () stores a copy,
     * so we must drop our reference here or it leaks. */
    gst_caps_unref (caps);
    result = g_list_prepend (result, gst_object_ref_sink (gst_device));
  }

  /* prepend + reverse keeps the list in original device order in O(n) */
  return g_list_reverse (result);
}
/* GObject property IDs for GstAvfDevice; 0 is reserved by GObject. */
enum
{
PROP_DEVICE_INDEX = 1
};
G_DEFINE_TYPE (GstAvfDevice, gst_avf_device, GST_TYPE_DEVICE);
static GstElement *gst_avf_device_create_element (GstDevice * device,
const gchar * name);
static gboolean gst_avf_device_reconfigure_element (GstDevice * device,
GstElement * element);
static void gst_avf_device_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_avf_device_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
/* Class setup for GstAvfDevice: element-creation vfuncs and the
 * "device-index" property. */
static void
gst_avf_device_class_init (GstAvfDeviceClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstDeviceClass *device_class = GST_DEVICE_CLASS (klass);

  device_class->create_element = gst_avf_device_create_element;
  device_class->reconfigure_element = gst_avf_device_reconfigure_element;

  gobject_class->get_property = gst_avf_device_get_property;
  gobject_class->set_property = gst_avf_device_set_property;

  /* Index into AVFoundation's device array; construct-only, so it cannot
   * change after the device object has been created. */
  g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
      g_param_spec_int ("device-index", "Device Index",
          "The zero-based device index", -1, G_MAXINT, 0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT_ONLY));
}
/* Instance init: all fields are filled in by gst_avf_device_new(). */
static void
gst_avf_device_init (GstAvfDevice * device)
{
}
/* create_element vfunc: instantiate the element for this device (e.g.
 * avfvideosrc) and point it at our device index.
 * Returns NULL if the element factory is not available. */
static GstElement *
gst_avf_device_create_element (GstDevice * device, const gchar * name)
{
  GstAvfDevice *avf_dev = GST_AVF_DEVICE (device);
  GstElement *elem;

  elem = gst_element_factory_make (avf_dev->element, name);
  /* Guard: without this, g_object_set () on a NULL element raises a GLib
   * critical when the factory is not registered. */
  if (elem == NULL)
    return NULL;

  g_object_set (elem, "device-index", avf_dev->device_index, NULL);

  return elem;
}
/* reconfigure_element vfunc: retarget an existing avfvideosrc at this
 * device.  Returns FALSE for any other element type. */
static gboolean
gst_avf_device_reconfigure_element (GstDevice * device, GstElement * element)
{
  GstAvfDevice *self = GST_AVF_DEVICE (device);

  /* Only avfvideosrc instances can be reconfigured in place. */
  if (strcmp (self->element, "avfvideosrc") != 0
      || !GST_IS_AVF_VIDEO_SRC (element))
    return FALSE;

  g_object_set (element, "device-index", self->device_index, NULL);
  return TRUE;
}
/* GObject getter for GstAvfDevice properties. */
static void
gst_avf_device_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstAvfDevice *self = GST_AVF_DEVICE_CAST (object);

  switch (prop_id) {
    case PROP_DEVICE_INDEX:
      g_value_set_int (value, self->device_index);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject setter for GstAvfDevice properties ("device-index" is
 * construct-only, so this effectively runs once at construction). */
static void
gst_avf_device_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAvfDevice *self = GST_AVF_DEVICE_CAST (object);

  switch (prop_id) {
    case PROP_DEVICE_INDEX:
      self->device_index = g_value_get_int (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* Build a GstAvfDevice for the given probed device.
 * device_name: localized display name; device_index: AVFoundation index;
 * caps: supported caps (caller keeps its reference); type: device kind.
 * Returns a new floating GstDevice, or NULL on bad arguments. */
static GstDevice *
gst_avf_device_new (const gchar * device_name, int device_index,
    GstCaps * caps, GstAvfDeviceType type)
{
  GstAvfDevice *device;
  const gchar *element_name = NULL;
  const gchar *device_class = NULL;

  g_return_val_if_fail (device_name, NULL);
  g_return_val_if_fail (caps, NULL);

  switch (type) {
    case GST_AVF_DEVICE_TYPE_VIDEO_SOURCE:
      element_name = "avfvideosrc";
      device_class = "Video/Source";
      break;
    default:
      /* only video sources are probed today */
      g_assert_not_reached ();
      break;
  }

  device = g_object_new (GST_TYPE_AVF_DEVICE,
      "display-name", device_name, "caps", caps, "device-class", device_class,
      "device-index", device_index, NULL);
  device->type = type;
  device->element = element_name;

  return GST_DEVICE (device);
}
......@@ -20,6 +20,7 @@
#ifndef __GST_AVF_VIDEO_SRC_H__
#define __GST_AVF_VIDEO_SRC_H__
#import <AVFoundation/AVFoundation.h>
#include <gst/base/gstpushsrc.h>
G_BEGIN_DECLS
......@@ -82,6 +83,8 @@ struct _GstAVFVideoSrcClass
GType gst_avf_video_src_get_type (void);
GstCaps *gst_av_capture_device_get_caps (AVCaptureDevice *device, AVCaptureVideoDataOutput *output, GstAVFVideoSourceOrientation orientation);
G_END_DECLS
#endif /* __GST_AVF_VIDEO_SRC_H__ */
......@@ -48,6 +48,10 @@
GST_DEBUG_CATEGORY (gst_avf_video_src_debug);
#define GST_CAT_DEFAULT gst_avf_video_src_debug
static GstVideoFormat get_gst_video_format(NSNumber *pixel_format);
static CMVideoDimensions
get_oriented_dimensions(GstAVFVideoSourceOrientation orientation, CMVideoDimensions dimensions);
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
......@@ -507,27 +511,11 @@ static AVCaptureVideoOrientation GstAVFVideoSourceOrientation2AVCaptureVideoOrie
/* Map a CoreVideo pixel-format number to a GstVideoFormat, logging any
 * format we do not handle.
 * NOTE(review): this span was diff residue (old switch body and new
 * delegating body concatenated, with a duplicate gst_format declaration);
 * reconstructed from the added lines of the commit. */
- (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format
{
  GstVideoFormat gst_format = get_gst_video_format (pixel_format);
  if (gst_format == GST_VIDEO_FORMAT_UNKNOWN) {
    GST_LOG_OBJECT (element, "Pixel format %s is not handled by avfvideosrc",
        [[pixel_format stringValue] UTF8String]);
  }
  return gst_format;
}
......@@ -568,96 +556,16 @@ static AVCaptureVideoOrientation GstAVFVideoSourceOrientation2AVCaptureVideoOrie
/* Swap width/height for portrait orientations; delegates to the shared
 * helper so the device provider can reuse the same logic.
 * NOTE(review): this span was diff residue (removed inline implementation
 * plus the added delegating line); reconstructed from the added lines. */
- (CMVideoDimensions)orientedDimensions:(CMVideoDimensions)dimensions
{
  return get_oriented_dimensions (orientation, dimensions);
}
/* Query the caps supported by the current device/output pair; the heavy
 * lifting lives in gst_av_capture_device_get_caps () so the device
 * provider can share it.
 * NOTE(review): this span was diff residue (the old inline caps-building
 * loop plus the new delegating body); reconstructed from the added lines. */
- (GstCaps *)getDeviceCaps
{
  GST_DEBUG_OBJECT (element, "Getting device caps");
  GstCaps *device_caps = gst_av_capture_device_get_caps (device, output,
      orientation);
  GST_DEBUG_OBJECT (element, "Device returned the following caps %" GST_PTR_FORMAT,
      device_caps);

  return device_caps;
}
- (BOOL)setDeviceCaps:(GstVideoInfo *)info
......@@ -1605,3 +1513,120 @@ gst_avf_video_src_set_context (GstElement * element, GstContext * context)
{
[GST_AVF_VIDEO_SRC_IMPL (element) setContext:context];
}
/* Build the full set of caps supported by @device when captured through
 * @output, with dimensions adjusted for @orientation.
 *
 * For every device format x frame-rate range x available pixel format we
 * append a caps structure; formats matching the platform's GL upload
 * format additionally get a GLMemory-featured variant.  Returns a new,
 * simplified GstCaps (caller owns the reference). */
GstCaps*
gst_av_capture_device_get_caps (AVCaptureDevice *device, AVCaptureVideoDataOutput *output, GstAVFVideoSourceOrientation orientation)
{
NSArray *formats = [device valueForKey:@"formats"];
NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
GstCaps *result_caps, *result_gl_caps;
/* The GL texture upload format differs per platform (RECTANGLE/UYVY on
 * macOS, 2D/NV12 on iOS). */
#if !HAVE_IOS
GstVideoFormat gl_format = GST_VIDEO_FORMAT_UYVY;
#else
GstVideoFormat gl_format = GST_VIDEO_FORMAT_NV12;
#endif
result_caps = gst_caps_new_empty ();
result_gl_caps = gst_caps_new_empty ();
/* Do not use AVCaptureDeviceFormat or AVFrameRateRange only
 * available in iOS >= 7.0. We use a dynamic approach with key-value
 * coding or performSelector */
for (NSObject *f in [formats reverseObjectEnumerator]) {
/* formatDescription can't be retrieved with valueForKey so use a selector here */
CMFormatDescriptionRef formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions (formatDescription);
dimensions = get_oriented_dimensions (orientation, dimensions);
for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
int min_fps_n, min_fps_d, max_fps_n, max_fps_d;
gdouble min_fps, max_fps;
[[rate valueForKey:@"minFrameRate"] getValue:&min_fps];
gst_util_double_to_fraction (min_fps, &min_fps_n, &min_fps_d);
[[rate valueForKey:@"maxFrameRate"] getValue:&max_fps];
gst_util_double_to_fraction (max_fps, &max_fps_n, &max_fps_d);
for (NSNumber *pixel_format in pixel_formats) {
GstVideoFormat gst_format = get_gst_video_format (pixel_format);
if (gst_format != GST_VIDEO_FORMAT_UNKNOWN) {
/* Fixed frame rate gets a plain fraction, otherwise a fraction range. */
if (min_fps != max_fps)
gst_caps_append (result_caps, GST_AVF_FPS_RANGE_CAPS_NEW (gst_format, dimensions.width, dimensions.height, min_fps_n, min_fps_d, max_fps_n, max_fps_d));
else
gst_caps_append (result_caps, GST_AVF_CAPS_NEW (gst_format, dimensions.width, dimensions.height, max_fps_n, max_fps_d));
}
/* Duplicate the GL-capable format with a GLMemory caps feature and the
 * platform-appropriate texture target. */
if (gst_format == gl_format) {
GstCaps *gl_caps;
if (min_fps != max_fps) {
gl_caps = GST_AVF_FPS_RANGE_CAPS_NEW (gl_format,
dimensions.width, dimensions.height,
min_fps_n, min_fps_d,
max_fps_n, max_fps_d);
} else {
gl_caps = GST_AVF_CAPS_NEW (gl_format,
dimensions.width, dimensions.height,
max_fps_n, max_fps_d);
}
gst_caps_set_features (gl_caps, 0,
gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
NULL));
gst_caps_set_simple (gl_caps,
"texture-target", G_TYPE_STRING,
#if !HAVE_IOS
GST_GL_TEXTURE_TARGET_RECTANGLE_STR,
#else
GST_GL_TEXTURE_TARGET_2D_STR,
#endif
NULL);
gst_caps_append (result_gl_caps, gl_caps);
}
}
}
}
/* GL caps are preferred, so merge them in front of the system-memory caps. */
result_gl_caps = gst_caps_simplify (gst_caps_merge (result_gl_caps, result_caps));
return result_gl_caps;
}
/* Translate a CoreVideo pixel-format code (wrapped in an NSNumber) to the
 * corresponding GstVideoFormat; GST_VIDEO_FORMAT_UNKNOWN for anything we
 * do not support. */
static GstVideoFormat
get_gst_video_format (NSNumber *pixel_format)
{
  switch ([pixel_format integerValue]) {
    case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: /* 420v */
      return GST_VIDEO_FORMAT_NV12;
    case kCVPixelFormatType_422YpCbCr8: /* 2vuy */
      return GST_VIDEO_FORMAT_UYVY;
    case kCVPixelFormatType_32BGRA: /* BGRA */
      return GST_VIDEO_FORMAT_BGRA;
    case kCVPixelFormatType_422YpCbCr8_yuvs: /* yuvs */
      return GST_VIDEO_FORMAT_YUY2;
    default:
      return GST_VIDEO_FORMAT_UNKNOWN;
  }
}
/* Return @dimensions with width and height swapped when @orientation is a
 * portrait orientation; unchanged otherwise. */
static CMVideoDimensions
get_oriented_dimensions (GstAVFVideoSourceOrientation orientation, CMVideoDimensions dimensions)
{
  CMVideoDimensions oriented = dimensions;

  switch (orientation) {
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT:
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN:
      /* portrait rotates the sensor's landscape frame by 90 degrees */
      oriented.width = dimensions.height;
      oriented.height = dimensions.width;
      break;
    default:
      break;
  }

  return oriented;
}
......@@ -35,7 +35,8 @@ if avfoundation_dep.found()
applemedia_sources += [
'avfvideosrc.m',
'avfassetsrc.m',
'avsamplevideosink.m'
'avsamplevideosink.m',
'avfdeviceprovider.m',
]
applemedia_frameworks += [avfoundation_dep]
endif
......
......@@ -30,6 +30,7 @@
#ifdef HAVE_AVFOUNDATION
#include "avfvideosrc.h"
#include "avfassetsrc.h"
#include "avfdeviceprovider.h"
#include "avsamplevideosink.h"
#endif
#ifdef HAVE_VIDEOTOOLBOX
......@@ -80,6 +81,8 @@ plugin_init (GstPlugin * plugin)
GST_TYPE_AVF_ASSET_SRC);
res &= gst_element_register (plugin, "avsamplebufferlayersink",
GST_RANK_NONE, GST_TYPE_AV_SAMPLE_VIDEO_SINK);
res &= gst_device_provider_register (plugin, "avfdeviceprovider",
GST_RANK_PRIMARY, GST_TYPE_AVF_DEVICE_PROVIDER);
#endif
res &= gst_element_register (plugin, "atdec", GST_RANK_MARGINAL, GST_TYPE_ATDEC);
......