Commit 375a50a8 authored by Hyunjun Ko, committed by Sreerenj Balachandran

msdk: add async depth from each msdk element to GstMsdkContext to be shared

In the case of a pipeline like ".. ! decoder ! encoder ! ..." that uses video
memory, the decoder needs to know the async depth of the following msdk
element so that it can allocate the correct number of video-memory surfaces.

Otherwise, the decoder's memory is exhausted while processing.

https://bugzilla.gnome.org/show_bug.cgi?id=790752
parent f2b35abc
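As a rough, standalone illustration of the mechanism described above (toy types and numbers invented for this sketch, not the plugin's real structures): every element registers its own async depth in a context shared across the pipeline, and each element pads its surface request with the accumulated total.

/* Toy model only; ToyContext and the numbers below are made up. */
#include <stdio.h>

typedef struct
{
  int shared_async_depth;       /* sum of every element's async depth */
} ToyContext;

static void
toy_context_add_shared_async_depth (ToyContext * ctx, int async_depth)
{
  ctx->shared_async_depth += async_depth;
}

int
main (void)
{
  ToyContext ctx = { 0 };
  int num_frame_suggested = 6;  /* e.g. what QueryIOSurf suggested */

  /* at start(), each msdk element contributes its async depth */
  toy_context_add_shared_async_depth (&ctx, 4);   /* decoder */
  toy_context_add_shared_async_depth (&ctx, 4);   /* encoder */

  /* at allocation time, the request is padded with the shared total,
   * so the decoder's surfaces are not exhausted by the encoder */
  num_frame_suggested += ctx.shared_async_depth;
  printf ("allocating %d video-memory surfaces\n", num_frame_suggested);
  return 0;
}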
@@ -56,6 +56,7 @@ struct _GstMsdkContextPrivate
  gboolean hardware;
  gboolean is_joined;
  GstMsdkContextJobType job_type;
  gint shared_async_depth;
#ifndef _WIN32
  gint fd;
  VADisplay dpy;
@@ -348,3 +349,16 @@ gst_msdk_context_add_job_type (GstMsdkContext * context,
{
  context->priv->job_type |= job_type;
}

gint
gst_msdk_context_get_shared_async_depth (GstMsdkContext * context)
{
  return context->priv->shared_async_depth;
}

void
gst_msdk_context_add_shared_async_depth (GstMsdkContext * context,
    gint async_depth)
{
  context->priv->shared_async_depth += async_depth;
}
@@ -125,6 +125,12 @@ gst_msdk_context_get_job_type (GstMsdkContext * context);
void
gst_msdk_context_add_job_type (GstMsdkContext * context, GstMsdkContextJobType job_type);

gint
gst_msdk_context_get_shared_async_depth (GstMsdkContext * context);

void
gst_msdk_context_add_shared_async_depth (GstMsdkContext * context, gint async_depth);

G_END_DECLS

#endif /* GST_MSDK_CONTEXT_H */
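A condensed usage sketch of the two new entry points follows. The wrapper functions and their names are invented for illustration (the real call sites are the decoder and encoder hunks below), and it assumes the plugin-internal gstmsdkcontext.h and the Media SDK's mfxFrameAllocRequest type are available.

/* Illustrative wrappers only; names are made up. The real call sites are in
 * gst_msdkdec_start()/gst_msdkdec_init_decoder() and the encoder
 * counterparts shown below. */
static void
register_async_depth (GstMsdkContext * context, gint my_async_depth)
{
  /* called once per element, typically at start() */
  gst_msdk_context_add_shared_async_depth (context, my_async_depth);
}

static void
pad_surface_request (GstMsdkContext * context, mfxFrameAllocRequest * request)
{
  /* called at allocation time: account for every element's async depth */
  request->NumFrameSuggested +=
      gst_msdk_context_get_shared_async_depth (context);
}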
@@ -328,8 +328,13 @@ gst_msdkdec_init_decoder (GstMsdkDec * thiz)
  }

  if (thiz->use_video_memory) {
    gint shared_async_depth;

    shared_async_depth =
        gst_msdk_context_get_shared_async_depth (thiz->context);
    request.NumFrameSuggested += shared_async_depth;
    request.Type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    request.NumFrameSuggested += thiz->async_depth;
    gst_msdk_frame_alloc (thiz->context, &request, &thiz->alloc_resp);
  }
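With illustrative numbers (not taken from the commit): if QueryIOSurf suggests 6 surfaces and both the decoder and a downstream msdk encoder have registered an async depth of 4 in the shared context, the request above grows by the shared total of 8, so the decoder allocates enough video-memory surfaces to cover the encoder's in-flight operations as well.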
@@ -501,6 +506,9 @@ gst_msdkdec_start (GstVideoDecoder * decoder)
      parent_context = thiz->context;
      thiz->context = gst_msdk_context_new_with_parent (parent_context);
      gst_msdk_context_add_shared_async_depth (thiz->context,
          gst_msdk_context_get_shared_async_depth (parent_context));
      gst_object_unref (parent_context);

      GST_INFO_OBJECT (thiz,
@@ -517,6 +525,8 @@ gst_msdkdec_start (GstVideoDecoder * decoder)
          thiz->context);
  }

  gst_msdk_context_add_shared_async_depth (thiz->context, thiz->async_depth);

  return TRUE;
}
@@ -265,6 +265,9 @@ gst_msdkenc_init_encoder (GstMsdkEnc * thiz)
        msdk_status_to_string (status));
  }

  if (thiz->use_video_memory)
    request[0].NumFrameSuggested +=
        gst_msdk_context_get_shared_async_depth (thiz->context);

  thiz->num_vpp_surfaces = request[0].NumFrameSuggested;

  if (thiz->use_video_memory)
@@ -1157,6 +1160,8 @@ gst_msdkenc_start (GstVideoEncoder * encoder)
          thiz->context);
  }

  gst_msdk_context_add_shared_async_depth (thiz->context, thiz->async_depth);

  /* Set the minimum pts to some huge value (1000 hours). This keeps
     the dts at the start of the stream from needing to be
     negative. */
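The encoder side mirrors the decoder: it registers its own async depth at start() and, when video memory is in use, pads request[0].NumFrameSuggested with the shared total before that value is recorded in num_vpp_surfaces. With the same illustrative numbers as above, the encoder's suggested surface count would likewise grow by 8.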