Commit 3afec4dd authored by Aurélien Zanelli, committed by Nicolas Dufresne

v4l2: set min_latency for output device according to required minimum number of buffers

Since we can query the minimum number of buffers an output device needs
to work, use that value to set min_latency, which determines how many
buffers are queued.

https://bugzilla.gnome.org/show_bug.cgi?id=736072
parent d9a7954d
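The minimum comes from a V4L2 control query, as the second hunk below
shows. As background, here is a minimal sketch of that query mechanism,
assuming the control involved is V4L2_CID_MIN_BUFFERS_FOR_OUTPUT (the
hunk only shows the VIDIOC_G_CTRL call; the helper name and error
handling here are illustrative, not code from this commit):

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Ask the driver how many buffers the output queue needs at minimum.
 * Returns 0 when the control is not supported, mirroring the fallback
 * in the second hunk below. */
static int
query_min_buffers_for_output (int video_fd)
{
  struct v4l2_control ctl;

  memset (&ctl, 0, sizeof (ctl));
  ctl.id = V4L2_CID_MIN_BUFFERS_FOR_OUTPUT;

  if (ioctl (video_fd, VIDIOC_G_CTRL, &ctl) < 0)
    return 0;

  return ctl.value;
}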
@@ -623,8 +623,10 @@ gst_v4l2_buffer_pool_start (GstBufferPool * bpool)
           &max_buffers))
     goto wrong_config;
 
-  /* TODO Also consider min_buffers_for_output when implemented */
-  min_latency = MAX (GST_V4L2_MIN_BUFFERS, obj->min_buffers_for_capture);
+  if (V4L2_TYPE_IS_OUTPUT (obj->type))
+    min_latency = MAX (GST_V4L2_MIN_BUFFERS, obj->min_buffers_for_output);
+  else
+    min_latency = MAX (GST_V4L2_MIN_BUFFERS, obj->min_buffers_for_capture);
 
   switch (obj->mode) {
     case GST_V4L2_IO_RW:
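For context, V4L2_TYPE_IS_OUTPUT comes from linux/videodev2.h and
matches the output buffer types; a simplified sketch of the check it
performs (the kernel macro covers more output types than shown here):

/* Simplified stand-in for the kernel's V4L2_TYPE_IS_OUTPUT; the real
 * macro also matches overlay and VBI output types. */
#define TYPE_IS_OUTPUT_SKETCH(type)              \
  ((type) == V4L2_BUF_TYPE_VIDEO_OUTPUT          \
   || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)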
@@ -3472,10 +3472,13 @@ gst_v4l2_object_propose_allocation (GstV4l2Object * obj, GstQuery * query)
 
     if (v4l2_ioctl (obj->video_fd, VIDIOC_G_CTRL, &ctl) >= 0) {
       GST_DEBUG_OBJECT (obj->element, "driver requires a minimum of %d buffers",
           ctl.value);
-      min = MAX (ctl.value, GST_V4L2_MIN_BUFFERS);
+      obj->min_buffers_for_output = ctl.value;
+    } else {
+      obj->min_buffers_for_output = 0;
     }
 
+    min = MAX (obj->min_buffers_for_output, GST_V4L2_MIN_BUFFERS);
     gst_query_add_allocation_pool (query, pool, size, min, max);
 
     /* we also support various metadata */
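On the other end of the ALLOCATION query, an upstream element can read
the proposed minimum back out with the standard GstQuery accessors; a
sketch with illustrative names (not part of this commit):

#include <gst/gst.h>

/* Hypothetical upstream helper: reads the pool parameters that
 * gst_v4l2_object_propose_allocation () added to the query. */
static void
read_proposed_pool (GstQuery * query)
{
  GstBufferPool *pool = NULL;
  guint size, min_buffers, max_buffers;

  if (gst_query_get_n_allocation_pools (query) == 0)
    return;

  gst_query_parse_nth_allocation_pool (query, 0, &pool, &size,
      &min_buffers, &max_buffers);

  /* min_buffers now carries MAX (min_buffers_for_output,
   * GST_V4L2_MIN_BUFFERS) as proposed above. */

  if (pool)
    gst_object_unref (pool);
}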
@@ -121,6 +121,9 @@ struct _GstV4l2Object {
    * calculate the minimum latency of a m2m decoder. */
   guint32 min_buffers_for_capture;
 
+  /* This will be set if supported in propose allocation. */
+  guint32 min_buffers_for_output;
+
   /* wanted mode */
   GstV4l2IOMode req_mode;