Commit 6354769d authored by Sebastian Dröge, committed by Sebastian Dröge

decklink: Add support for parsing/outputting AFD/Bar

parent ba21c210
Pipeline #35009 passed with stages in 35 minutes and 13 seconds
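For orientation, here is a minimal sketch (not part of this commit) of how an application might use the two new properties and read back the metas that decklinkvideosrc attaches. The element names, property names and meta types come from this commit; the helper functions, the chosen VANC line number and the surrounding pipeline wiring are illustrative assumptions.

#include <gst/gst.h>
#include <gst/video/video.h>

/* Enable AFD/Bar handling on both ends of a DeckLink pipeline. */
static void
configure_afd_bar (GstElement * decklink_sink, GstElement * decklink_src)
{
  /* Sink: insert AFD/Bar VANC data on line 12 (example value, 0 = disabled) */
  g_object_set (decklink_sink, "afd-bar-line", 12, NULL);
  /* Source: parse AFD/Bar from VANC and attach it to buffers as GstMeta */
  g_object_set (decklink_src, "output-afd-bar", TRUE, NULL);
}

/* Inspect the metas on a captured buffer, e.g. from a pad probe. */
static void
inspect_afd_bar (GstBuffer * buffer)
{
  GstVideoAFDMeta *afd = (GstVideoAFDMeta *)
      gst_buffer_get_meta (buffer, GST_VIDEO_AFD_META_API_TYPE);
  GstVideoBarMeta *bar = (GstVideoBarMeta *)
      gst_buffer_get_meta (buffer, GST_VIDEO_BAR_META_API_TYPE);

  if (afd)
    g_print ("AFD %d (field %d)\n", (gint) afd->afd, (gint) afd->field);
  if (bar)
    g_print ("Bar: letterbox=%d bar1=%u bar2=%u (field %d)\n",
        bar->is_letterbox, bar->bar_data1, bar->bar_data2, (gint) bar->field);
}

Setting a non-zero afd-bar-line (or cc-line) also makes the sink enable VANC output when calling EnableVideoOutput, as the set_caps change below shows.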
@@ -244,6 +244,7 @@ enum
PROP_KEYER_LEVEL,
PROP_HW_SERIAL_NUMBER,
PROP_CC_LINE,
PROP_AFD_BAR_LINE,
};
static void gst_decklink_video_sink_set_property (GObject * object,
@@ -388,6 +389,13 @@ gst_decklink_video_sink_class_init (GstDecklinkVideoSinkClass * klass)
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
G_PARAM_CONSTRUCT)));
g_object_class_install_property (gobject_class, PROP_AFD_BAR_LINE,
g_param_spec_int ("afd-bar-line", "AFD/Bar Line",
"Line number to use for inserting AFD/Bar data (0 = disabled)", 0,
10000, 0,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
G_PARAM_CONSTRUCT)));
templ_caps = gst_decklink_mode_get_template_caps (FALSE);
templ_caps = gst_caps_make_writable (templ_caps);
/* For output we support any framerate and only really care about timestamps */
@@ -415,6 +423,7 @@ gst_decklink_video_sink_init (GstDecklinkVideoSink * self)
/* VITC is legacy, we should expect RP188 in modern use cases */
self->timecode_format = bmdTimecodeRP188Any;
self->caption_line = 0;
self->afd_bar_line = 0;
gst_base_sink_set_max_lateness (GST_BASE_SINK_CAST (self), 20 * GST_MSECOND);
gst_base_sink_set_qos_enabled (GST_BASE_SINK_CAST (self), TRUE);
@@ -469,6 +478,9 @@ gst_decklink_video_sink_set_property (GObject * object, guint property_id,
case PROP_CC_LINE:
self->caption_line = g_value_get_int (value);
break;
case PROP_AFD_BAR_LINE:
self->afd_bar_line = g_value_get_int (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
@@ -515,6 +527,9 @@ gst_decklink_video_sink_get_property (GObject * object, guint property_id,
case PROP_CC_LINE:
g_value_set_int (value, self->caption_line);
break;
case PROP_AFD_BAR_LINE:
g_value_set_int (value, self->afd_bar_line);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
@@ -613,13 +628,15 @@ gst_decklink_video_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
* Note that this flag will have no effect in practice if the video stream
* does not contain timecode metadata.
*/
if ((gint64) self->timecode_format ==
(gint64) GST_DECKLINK_TIMECODE_FORMAT_VITC
|| (gint64) self->timecode_format ==
(gint64) GST_DECKLINK_TIMECODE_FORMAT_VITCFIELD2)
flags = bmdVideoOutputVITC;
else
flags = bmdVideoOutputRP188;
if (self->caption_line > 0 || self->afd_bar_line > 0)
flags = (BMDVideoOutputFlags) (flags | bmdVideoOutputVANC);
ret = self->output->output->EnableVideoOutput (mode->mode, flags);
@@ -771,7 +788,7 @@ gst_decklink_video_sink_convert_to_internal_clock (GstDecklinkVideoSink * self,
}
if (external_base != GST_CLOCK_TIME_NONE &&
internal_base != GST_CLOCK_TIME_NONE)
*timestamp += internal_offset;
else
*timestamp = gst_clock_get_internal_time (self->output->clock);
@@ -883,6 +900,287 @@ convert_cea708_cc_data_cea708_cdp_internal (GstDecklinkVideoSink * self,
return len;
}
static void
write_vbi (GstDecklinkVideoSink * self, GstBuffer * buffer,
BMDPixelFormat format, IDeckLinkMutableVideoFrame * frame,
GstVideoTimeCodeMeta * tc_meta)
{
IDeckLinkVideoFrameAncillary *vanc_frame = NULL;
gpointer iter = NULL;
GstVideoCaptionMeta *cc_meta;
guint8 *vancdata;
gboolean got_captions = FALSE;
if (self->caption_line == 0 && self->afd_bar_line == 0)
return;
if (self->vbiencoder == NULL) {
self->vbiencoder =
gst_video_vbi_encoder_new (self->info.finfo->format, self->info.width);
self->anc_vformat = self->info.finfo->format;
}
/* Put any closed captions into the configured line */
while ((cc_meta =
(GstVideoCaptionMeta *) gst_buffer_iterate_meta_filtered (buffer,
&iter, GST_VIDEO_CAPTION_META_API_TYPE))) {
switch (cc_meta->caption_type) {
case GST_VIDEO_CAPTION_TYPE_CEA608_RAW:{
guint8 data[138];
guint i, n;
n = cc_meta->size / 2;
if (cc_meta->size > 46) {
GST_WARNING_OBJECT (self, "Too big raw CEA608 buffer");
break;
}
/* This is the offset from line 9 for 525-line fields and from line
* 5 for 625-line fields.
*
* The highest bit is set for field 1 but not for field 0, but we
* have no way of knowing the field here
*/
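/* Each S334-1 Annex A triplet packed below is one byte carrying the field
 * flag and line offset, followed by the two CEA-608 data bytes. */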
for (i = 0; i < n; i++) {
data[3 * i] = 0x80 | (self->info.height ==
525 ? self->caption_line - 9 : self->caption_line - 5);
data[3 * i + 1] = cc_meta->data[2 * i];
data[3 * i + 2] = cc_meta->data[2 * i + 1];
}
if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
FALSE,
GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 >> 8,
GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 & 0xff, data, 3))
GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");
got_captions = TRUE;
break;
}
case GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A:{
if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
FALSE,
GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 >> 8,
GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 & 0xff, cc_meta->data,
cc_meta->size))
GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");
got_captions = TRUE;
break;
}
case GST_VIDEO_CAPTION_TYPE_CEA708_RAW:{
guint8 data[256];
guint n;
n = cc_meta->size / 3;
if (cc_meta->size > 46) {
GST_WARNING_OBJECT (self, "Too big raw CEA708 buffer");
break;
}
n = convert_cea708_cc_data_cea708_cdp_internal (self, cc_meta->data,
cc_meta->size, data, sizeof (data), tc_meta);
if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder, FALSE,
GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 >> 8,
GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 & 0xff, data, n))
GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");
got_captions = TRUE;
break;
}
case GST_VIDEO_CAPTION_TYPE_CEA708_CDP:{
if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
FALSE,
GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 >> 8,
GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 & 0xff, cc_meta->data,
cc_meta->size))
GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");
got_captions = TRUE;
break;
}
default:{
GST_FIXME_OBJECT (self, "Caption type %d not supported",
cc_meta->caption_type);
break;
}
}
}
if ((got_captions || self->afd_bar_line != 0)
&& self->output->output->CreateAncillaryData (format,
&vanc_frame) == S_OK) {
GstVideoAFDMeta *afd_meta = NULL, *afd_meta2 = NULL;
GstVideoBarMeta *bar_meta = NULL, *bar_meta2 = NULL;
GstMeta *meta;
gpointer meta_iter;
guint8 afd_bar_data[8] = { 0, };
guint8 afd_bar_data2[8] = { 0, };
guint8 afd = 0;
gboolean is_letterbox = 0;
guint16 bar1 = 0, bar2 = 0;
guint i;
// Get any reasonable AFD/Bar metas for both fields
meta_iter = NULL;
while ((meta =
gst_buffer_iterate_meta_filtered (buffer, &meta_iter,
GST_VIDEO_AFD_META_API_TYPE))) {
GstVideoAFDMeta *tmp_meta = (GstVideoAFDMeta *) meta;
if (tmp_meta->field == 0 || !afd_meta || (afd_meta && afd_meta->field != 0
&& tmp_meta->field == 0))
afd_meta = tmp_meta;
if (tmp_meta->field == 1 || !afd_meta2 || (afd_meta2
&& afd_meta->field != 1 && tmp_meta->field == 1))
afd_meta2 = tmp_meta;
}
meta_iter = NULL;
while ((meta =
gst_buffer_iterate_meta_filtered (buffer, &meta_iter,
GST_VIDEO_BAR_META_API_TYPE))) {
GstVideoBarMeta *tmp_meta = (GstVideoBarMeta *) meta;
if (tmp_meta->field == 0 || !bar_meta || (bar_meta && bar_meta->field != 0
&& tmp_meta->field == 0))
bar_meta = tmp_meta;
if (tmp_meta->field == 1 || !bar_meta2 || (bar_meta2
&& bar_meta->field != 1 && tmp_meta->field == 1))
bar_meta2 = tmp_meta;
}
for (i = 0; i < 2; i++) {
guint8 *afd_bar_data_ptr;
if (i == 0) {
afd_bar_data_ptr = afd_bar_data;
afd = afd_meta ? afd_meta->afd : 0;
is_letterbox = bar_meta ? bar_meta->is_letterbox : FALSE;
bar1 = bar_meta ? bar_meta->bar_data1 : 0;
bar2 = bar_meta ? bar_meta->bar_data2 : 0;
} else {
afd_bar_data_ptr = afd_bar_data2;
afd = afd_meta2 ? afd_meta2->afd : 0;
is_letterbox = bar_meta2 ? bar_meta2->is_letterbox : FALSE;
bar1 = bar_meta2 ? bar_meta2->bar_data1 : 0;
bar2 = bar_meta2 ? bar_meta2->bar_data2 : 0;
}
/* See SMPTE 2016-3 Section 4 */
/* AFD and AR */
if (self->mode < (gint) GST_DECKLINK_MODE_NTSC_WIDESCREEN) {
afd_bar_data_ptr[0] = (afd << 3) | 0x0;
} else {
afd_bar_data_ptr[0] = (afd << 3) | 0x4;
}
/* Bar flags */
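/* 0xc0 = top and bottom bars present (letterbox), 0x30 = left and right
 * bars present (pillarbox) */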
afd_bar_data_ptr[3] = is_letterbox ? 0xc0 : 0x30;
/* Bar value 1 and 2 */
GST_WRITE_UINT16_BE (&afd_bar_data_ptr[4], bar1);
GST_WRITE_UINT16_BE (&afd_bar_data_ptr[6], bar2);
}
/* AFD on the same line as the captions */
if (self->caption_line == self->afd_bar_line) {
if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
FALSE, GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR >> 8,
GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR & 0xff, afd_bar_data,
sizeof (afd_bar_data)))
GST_WARNING_OBJECT (self,
"Couldn't add AFD/Bar data to ancillary data");
}
/* FIXME: Add captions to the correct field? Captions for the second
* field should probably be inserted into the second field */
if (got_captions || self->caption_line == self->afd_bar_line) {
if (vanc_frame->GetBufferForVerticalBlankingLine (self->caption_line,
(void **) &vancdata) == S_OK) {
gst_video_vbi_encoder_write_line (self->vbiencoder, vancdata);
} else {
GST_WARNING_OBJECT (self,
"Failed to get buffer for line %d ancillary data",
self->caption_line);
}
}
/* AFD on a different line than the captions */
if (self->afd_bar_line != 0 && self->caption_line != self->afd_bar_line) {
if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
FALSE, GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR >> 8,
GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR & 0xff, afd_bar_data,
sizeof (afd_bar_data)))
GST_WARNING_OBJECT (self,
"Couldn't add AFD/Bar data to ancillary data");
if (vanc_frame->GetBufferForVerticalBlankingLine (self->afd_bar_line,
(void **) &vancdata) == S_OK) {
gst_video_vbi_encoder_write_line (self->vbiencoder, vancdata);
} else {
GST_WARNING_OBJECT (self,
"Failed to get buffer for line %d ancillary data",
self->afd_bar_line);
}
}
/* For interlaced video we need to also add AFD to the second field */
if (GST_VIDEO_INFO_IS_INTERLACED (&self->info) && self->afd_bar_line != 0) {
guint field2_offset;
/* The VANC lines for the second field are at an offset, depending on
* the format in use.
*/
switch (self->info.height) {
case 486:
/* NTSC: 525 / 2 + 1 */
field2_offset = 263;
break;
case 576:
/* PAL: 625 / 2 + 1 */
field2_offset = 313;
break;
case 1080:
/* 1080i: 1125 / 2 + 1 */
field2_offset = 563;
break;
default:
g_assert_not_reached ();
}
if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
FALSE, GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR >> 8,
GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR & 0xff, afd_bar_data2,
sizeof (afd_bar_data)))
GST_WARNING_OBJECT (self,
"Couldn't add AFD/Bar data to ancillary data");
if (vanc_frame->GetBufferForVerticalBlankingLine (self->afd_bar_line +
field2_offset, (void **) &vancdata) == S_OK) {
gst_video_vbi_encoder_write_line (self->vbiencoder, vancdata);
} else {
GST_WARNING_OBJECT (self,
"Failed to get buffer for line %d ancillary data",
self->afd_bar_line);
}
}
if (frame->SetAncillaryData (vanc_frame) != S_OK) {
GST_WARNING_OBJECT (self, "Failed to set ancillary data");
}
vanc_frame->Release ();
} else if (got_captions || self->afd_bar_line != 0) {
GST_WARNING_OBJECT (self, "Failed to allocate ancillary data frame");
}
}
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
@@ -1004,131 +1302,7 @@ gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
g_free (tc_str);
}
if (self->caption_line != 0) {
IDeckLinkVideoFrameAncillary *vanc_frame = NULL;
gpointer iter = NULL;
GstVideoCaptionMeta *cc_meta;
guint8 *vancdata;
gboolean got_captions = FALSE;
/* Put any closed captions into the configured line */
while ((cc_meta =
(GstVideoCaptionMeta *) gst_buffer_iterate_meta_filtered (buffer,
&iter, GST_VIDEO_CAPTION_META_API_TYPE))) {
if (self->vbiencoder == NULL) {
self->vbiencoder =
gst_video_vbi_encoder_new (self->info.finfo->format,
self->info.width);
self->anc_vformat = self->info.finfo->format;
}
switch (cc_meta->caption_type) {
case GST_VIDEO_CAPTION_TYPE_CEA608_RAW:{
guint8 data[138];
guint i, n;
n = cc_meta->size / 2;
if (cc_meta->size > 46) {
GST_WARNING_OBJECT (self, "Too big raw CEA608 buffer");
break;
}
/* This is the offset from line 9 for 525-line fields and from line
* 5 for 625-line fields.
*
* The highest bit is set for field 1 but not for field 0, but we
* have no way of knowing the field here
*/
for (i = 0; i < n; i++) {
data[3 * i] = 0x80 | (self->info.height ==
525 ? self->caption_line - 9 : self->caption_line - 5);
data[3 * i + 1] = cc_meta->data[2 * i];
data[3 * i + 2] = cc_meta->data[2 * i + 1];
}
if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
FALSE,
GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 >> 8,
GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 & 0xff, data, 3))
GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");
got_captions = TRUE;
break;
}
case GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A:{
if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
FALSE,
GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 >> 8,
GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 & 0xff, cc_meta->data,
cc_meta->size))
GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");
got_captions = TRUE;
break;
}
case GST_VIDEO_CAPTION_TYPE_CEA708_RAW:{
guint8 data[256];
guint n;
n = cc_meta->size / 3;
if (cc_meta->size > 46) {
GST_WARNING_OBJECT (self, "Too big raw CEA708 buffer");
break;
}
n = convert_cea708_cc_data_cea708_cdp_internal (self, cc_meta->data,
cc_meta->size, data, sizeof (data), tc_meta);
if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder, FALSE,
GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 >> 8,
GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 & 0xff, data, n))
GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");
got_captions = TRUE;
break;
}
case GST_VIDEO_CAPTION_TYPE_CEA708_CDP:{
if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
FALSE,
GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 >> 8,
GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 & 0xff, cc_meta->data,
cc_meta->size))
GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");
got_captions = TRUE;
break;
}
default:{
GST_FIXME_OBJECT (self, "Caption type %d not supported",
cc_meta->caption_type);
break;
}
}
}
if (got_captions
&& self->output->output->CreateAncillaryData (format,
&vanc_frame) == S_OK) {
if (vanc_frame->GetBufferForVerticalBlankingLine (self->caption_line,
(void **) &vancdata) == S_OK) {
gst_video_vbi_encoder_write_line (self->vbiencoder, vancdata);
if (frame->SetAncillaryData (vanc_frame) != S_OK) {
GST_WARNING_OBJECT (self, "Failed to set ancillary data");
}
} else {
GST_WARNING_OBJECT (self,
"Failed to get buffer for line %d ancillary data",
self->caption_line);
}
vanc_frame->Release ();
} else if (got_captions) {
GST_WARNING_OBJECT (self, "Failed to allocate ancillary data frame");
}
}
write_vbi (self, buffer, format, frame, tc_meta);
gst_decklink_video_sink_convert_to_internal_clock (self, &running_time,
&running_time_duration);
@@ -1419,9 +1593,11 @@ gst_decklink_video_sink_change_state (GstElement * element,
}
GST_OBJECT_LOCK (self);
if (self->external_base_time == GST_CLOCK_TIME_NONE
|| self->internal_base_time == GST_CLOCK_TIME_NONE) {
self->external_base_time = gst_clock_get_internal_time (clock);
self->internal_base_time =
gst_clock_get_internal_time (self->output->clock);
self->internal_time_offset = self->internal_base_time;
}
...
@@ -69,8 +69,11 @@ struct _GstDecklinkVideoSink
GstVideoVBIEncoder *vbiencoder;
GstVideoFormat anc_vformat;
gint caption_line;
guint16 cdp_hdr_sequence_cntr;
gint afd_bar_line;
};
struct _GstDecklinkVideoSinkClass
...
@@ -148,6 +148,7 @@ GST_DEBUG_CATEGORY_STATIC (gst_decklink_video_src_debug);
#define DEFAULT_SKIP_FIRST_TIME (0)
#define DEFAULT_DROP_NO_SIGNAL_FRAMES (FALSE)
#define DEFAULT_OUTPUT_CC (FALSE)
#define DEFAULT_OUTPUT_AFD_BAR (FALSE)
#ifndef ABSDIFF
#define ABSDIFF(x, y) ( (x) > (y) ? ((x) - (y)) : ((y) - (x)) )
@@ -168,7 +169,8 @@ enum
PROP_DROP_NO_SIGNAL_FRAMES,
PROP_SIGNAL,
PROP_HW_SERIAL_NUMBER,
PROP_OUTPUT_CC,
PROP_OUTPUT_AFD_BAR,
};
typedef struct
@@ -356,6 +358,12 @@ gst_decklink_video_src_class_init (GstDecklinkVideoSrcClass * klass)
DEFAULT_OUTPUT_CC,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_OUTPUT_AFD_BAR,
g_param_spec_boolean ("output-afd-bar", "Output AFD/Bar data",
"Extract and output AFD/Bar as GstMeta (if present)",
DEFAULT_OUTPUT_AFD_BAR,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
templ_caps = gst_decklink_mode_get_template_caps (TRUE);
gst_element_class_add_pad_template (element_class,
gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, templ_caps));
@@ -386,6 +394,8 @@ gst_decklink_video_src_init (GstDecklinkVideoSrc * self)
self->output_stream_time = DEFAULT_OUTPUT_STREAM_TIME;
self->skip_first_time = DEFAULT_SKIP_FIRST_TIME;
self->drop_no_signal_frames = DEFAULT_DROP_NO_SIGNAL_FRAMES;
self->output_cc = DEFAULT_OUTPUT_CC;
self->output_afd_bar = DEFAULT_OUTPUT_AFD_BAR;
self->window_size = 64;
self->times = g_new (GstClockTime, 4 * self->window_size);
@@ -470,6 +480,9 @@ gst_decklink_video_src_set_property (GObject * object, guint property_id,
case PROP_OUTPUT_CC:
self->output_cc = g_value_get_boolean (value);
break;
case PROP_OUTPUT_AFD_BAR:
self->output_afd_bar = g_value_get_boolean (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
@@ -527,6 +540,9 @@ gst_decklink_video_src_get_property (GObject * object, guint property_id,
case PROP_OUTPUT_CC:
g_value_set_boolean (value, self->output_cc);
break;
case PROP_OUTPUT_AFD_BAR:
g_value_set_boolean (value, self->output_afd_bar);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
@@ -929,14 +945,118 @@ gst_decklink_video_src_got_frame (GstElement * element,
}
static void
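/* Parse a single VANC line (optionally offset into the second field) and
 * attach any closed captions or AFD/Bar data found to the buffer as metas,
 * remembering the line so it can be checked first on subsequent frames. */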
extract_vbi_line (GstDecklinkVideoSrc * self, GstBuffer ** buffer,
IDeckLinkVideoFrameAncillary * vanc_frame, guint field2_offset, guint line,
gboolean * found_cc_out, gboolean * found_afd_bar_out)
{
GstVideoAncillary gstanc;
const guint8 *vancdata;
gboolean found_cc = FALSE, found_afd_bar = FALSE;
if (vanc_frame->GetBufferForVerticalBlankingLine (field2_offset + line,
(void **) &vancdata) != S_OK)
return;
GST_DEBUG_OBJECT (self, "Checking for VBI data on field line %u (field %u)",
field2_offset + line, field2_offset ? 2 : 1);
gst_video_vbi_parser_add_line (self->vbiparser, vancdata);
/* Check if CC or AFD/Bar is on this line if we didn't find any on a
* previous line. Remember the line where we found them */
while (gst_video_vbi_parser_get_ancillary (self->vbiparser,
&gstanc) == GST_VIDEO_VBI_PARSER_RESULT_OK) {
g_print ("found at line %d\n", field2_offset + line);
gst_util_dump_mem (vancdata, 16);
switch (GST_VIDEO_ANCILLARY_DID16 (&gstanc)) {
case GST_VIDEO_ANCILLARY_DID16_S334_EIA_708:
if (*found_cc_out || !self->output_cc)
continue;
GST_DEBUG_OBJECT (self,
"Adding CEA-708 CDP meta to buffer for line %u",
field2_offset + line);
GST_MEMDUMP_OBJECT (self, "CDP", gstanc.data, gstanc.data_count);
gst_buffer_add_video_caption_meta (*buffer,
GST_VIDEO_CAPTION_TYPE_CEA708_CDP, gstanc.data, gstanc.data_count);
found_cc = TRUE;
if (field2_offset)
self->last_cc_vbi_line_field2 = line;
else
self->last_cc_vbi_line = line;
break;
case GST_VIDEO_ANCILLARY_DID16_S334_EIA_608:
if (*found_cc_out || !self->output_cc)
continue;
GST_DEBUG_OBJECT (self,
"Adding CEA-608 meta to buffer for line %u", field2_offset + line);