gstvaapidecoder_h264.c 104 KB
Newer Older
1 2 3
/*
 *  gstvaapidecoder_h264.c - H.264 decoder
 *
4
 *  Copyright (C) 2011-2013 Intel Corporation
5
 *    Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public License
 *  as published by the Free Software Foundation; either version 2.1
 *  of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free
 *  Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 *  Boston, MA 02110-1301 USA
 */

/**
 * SECTION:gstvaapidecoder_h264
 * @short_description: H.264 decoder
 */

28
#include "sysdeps.h"
29
#include <string.h>
30
#include <gst/base/gstadapter.h>
31 32 33 34 35 36 37 38 39 40
#include <gst/codecparsers/gsth264parser.h>
#include "gstvaapidecoder_h264.h"
#include "gstvaapidecoder_objects.h"
#include "gstvaapidecoder_priv.h"
#include "gstvaapidisplay_priv.h"
#include "gstvaapiobject_priv.h"

#define DEBUG 1
#include "gstvaapidebug.h"

41 42 43
/* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
#define USE_STRICT_DPB_ORDERING 0

44 45
typedef struct _GstVaapiDecoderH264Private      GstVaapiDecoderH264Private;
typedef struct _GstVaapiDecoderH264Class        GstVaapiDecoderH264Class;
46 47
typedef struct _GstVaapiFrameStore              GstVaapiFrameStore;
typedef struct _GstVaapiFrameStoreClass         GstVaapiFrameStoreClass;
48
typedef struct _GstVaapiParserInfoH264          GstVaapiParserInfoH264;
49 50
typedef struct _GstVaapiPictureH264             GstVaapiPictureH264;

51 52 53 54
// Used for field_poc[]
#define TOP_FIELD       0
#define BOTTOM_FIELD    1

55
/* ------------------------------------------------------------------------- */
56
/* --- H.264 Parser Info                                                 --- */
57 58
/* ------------------------------------------------------------------------- */

59 60 61 62
#define GST_VAAPI_PARSER_INFO_H264(obj) \
    ((GstVaapiParserInfoH264 *)(obj))

/* A refcounted parsed NAL unit together with its decoded payload
 * (SPS, PPS or slice header), so parse results can be cached and
 * shared across decoding stages. */
struct _GstVaapiParserInfoH264 {
    GstVaapiMiniObject  parent_instance;
    GstH264NalUnit      nalu;   // NAL unit type/offsets as parsed
    union {                     // payload; valid member selected by nalu type
        GstH264SPS      sps;
        GstH264PPS      pps;
        GstH264SliceHdr slice_hdr;
    }                   data;
};

72 73
static inline const GstVaapiMiniObjectClass *
gst_vaapi_parser_info_h264_class(void)
74
{
75 76 77
    static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
        sizeof(GstVaapiParserInfoH264),
        NULL
78
    };
79 80
    return &GstVaapiParserInfoH264Class;
}
81

82 83 84 85 86
static inline GstVaapiParserInfoH264 *
gst_vaapi_parser_info_h264_new(void)
{
    return (GstVaapiParserInfoH264 *)
        gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
87 88
}

89 90 91 92 93 94 95 96 97 98
#define gst_vaapi_parser_info_h264_ref(pi) \
    gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))

#define gst_vaapi_parser_info_h264_unref(pi) \
    gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))

#define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi)          \
    gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr),  \
        (GstVaapiMiniObject *)(new_pi))

99 100 101 102
/* ------------------------------------------------------------------------- */
/* --- H.264 Pictures                                                    --- */
/* ------------------------------------------------------------------------- */

103 104 105 106
/*
 * Extended picture flags:
 *
 * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
107 108 109 110 111 112
 * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
 *     "used for short-term reference"
 * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
 *     "used for long-term reference"
 * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
 *     reference picture (short-term reference or long-term reference)
113 114 115
 */
enum {
    GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
116 117 118 119 120 121 122 123

    GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
        GST_VAAPI_PICTURE_FLAG_REFERENCE),
    GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
        GST_VAAPI_PICTURE_FLAG_REFERENCE | (GST_VAAPI_PICTURE_FLAG_LAST << 1)),
    GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
        GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
        GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
124 125 126 127 128
};

#define GST_VAAPI_PICTURE_IS_IDR(picture) \
    (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))

129 130 131 132 133 134 135 136 137 138
#define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture)      \
    ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
      GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
     GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)

#define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)       \
    ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
      GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
     GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)

139 140
/* An H.264 decoded picture (a frame or a single field) plus the
 * bookkeeping state used for POC derivation, reference picture
 * marking and DPB output. */
struct _GstVaapiPictureH264 {
    GstVaapiPicture             base;
    GstH264PPS                 *pps;
    GstH264SliceHdr            *last_slice_hdr;
    guint                       structure;              // GST_VAAPI_PICTURE_STRUCTURE_*
    gint32                      field_poc[2];           // [TOP_FIELD]/[BOTTOM_FIELD]; G_MAXINT32 = unset
    gint32                      frame_num;              // Original frame_num from slice_header()
    gint32                      frame_num_wrap;         // Temporary for ref pic marking: FrameNumWrap
    gint32                      long_term_frame_idx;    // Temporary for ref pic marking: LongTermFrameIdx
    gint32                      pic_num;                // Temporary for ref pic marking: PicNum
    gint32                      long_term_pic_num;      // Temporary for ref pic marking: LongTermPicNum
    GstVaapiPictureH264        *other_field;            // Temporary for ref pic marking: other field in the same frame store
    guint                       output_flag             : 1;    // picture is intended for display
    guint                       output_needed           : 1;    // still waiting to be output
};

155
GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
156

157 158
/* Codec-object destructor: no H.264 specific resources to release,
 * simply chains up to the base picture destructor. */
void
gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
{
    gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
}

163
gboolean
164 165 166 167 168
gst_vaapi_picture_h264_create(
    GstVaapiPictureH264                      *picture,
    const GstVaapiCodecObjectConstructorArgs *args
)
{
169 170
    if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
        return FALSE;
171

172 173
    picture->field_poc[0]       = G_MAXINT32;
    picture->field_poc[1]       = G_MAXINT32;
174
    picture->output_needed      = FALSE;
175
    return TRUE;
176 177 178 179 180
}

/* Allocates a new H.264 picture codec object, reserving room for the VA
 * picture parameter buffer; no slice or IQ-matrix data is attached here.
 * Returns NULL on failure. */
static inline GstVaapiPictureH264 *
gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
{
    return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
        &GstVaapiPictureH264Class,
        GST_VAAPI_CODEC_BASE(decoder),
        NULL, sizeof(VAPictureParameterBufferH264),
        NULL, 0,
        0);
}

189 190 191
static inline void
gst_vaapi_picture_h264_set_reference(
    GstVaapiPictureH264 *picture,
192 193
    guint                reference_flags,
    gboolean             other_field
194 195
)
{
Gwenole Beauchesne's avatar
Gwenole Beauchesne committed
196 197
    if (!picture)
        return;
198 199
    GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
    GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
200 201 202 203 204

    if (!other_field || !(picture = picture->other_field))
        return;
    GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
    GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
205 206
}

207 208 209
/* Creates the complementary field picture derived from @picture,
 * sharing the same underlying surface. Returns NULL on failure. */
static inline GstVaapiPictureH264 *
gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
{
    g_return_val_if_fail(picture, NULL);

    return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
}

215 216 217 218 219 220
/* ------------------------------------------------------------------------- */
/* --- Frame Buffers (DPB)                                               --- */
/* ------------------------------------------------------------------------- */

/* A DPB entry: holds either one complete frame or up to two
 * complementary fields, plus pending-output bookkeeping. */
struct _GstVaapiFrameStore {
    /*< private >*/
    GstVaapiMiniObject          parent_instance;

    guint                       structure;      // GST_VAAPI_PICTURE_STRUCTURE_*
    GstVaapiPictureH264        *buffers[2];     // frame, or first/second field
    guint                       num_buffers;    // valid entries in buffers[]
    guint                       output_needed;  // pictures still awaiting output
};

static void
230
gst_vaapi_frame_store_finalize(gpointer object)
231
{
232
    GstVaapiFrameStore * const fs = object;
233 234 235 236 237 238 239 240 241 242 243
    guint i;

    for (i = 0; i < fs->num_buffers; i++)
        gst_vaapi_picture_replace(&fs->buffers[i], NULL);
}

/* Creates a frame store seeded with @picture (a full frame or the first
 * field of a complementary pair). Returns NULL on allocation failure. */
static GstVaapiFrameStore *
gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
{
    static const GstVaapiMiniObjectClass frame_store_class = {
        sizeof(GstVaapiFrameStore),
        gst_vaapi_frame_store_finalize
    };
    GstVaapiFrameStore *fs;

    fs = (GstVaapiFrameStore *)gst_vaapi_mini_object_new(&frame_store_class);
    if (!fs)
        return NULL;

    /* Single-picture store; a second field may be added later. */
    fs->structure     = picture->structure;
    fs->buffers[0]    = gst_vaapi_picture_ref(picture);
    fs->buffers[1]    = NULL;
    fs->num_buffers   = 1;
    fs->output_needed = picture->output_needed;
    return fs;
}

262 263 264 265 266 267 268
/* Adds the second field @picture to frame store @fs, completing the
 * frame, and cross-fills the per-field POC values between the two field
 * pictures. Returns TRUE on success. */
static gboolean
gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
{
    guint field;

    /* Only a store holding exactly one first-field picture can accept
       its complementary (non-frame, non-first) field. */
    g_return_val_if_fail(fs->num_buffers == 1, FALSE);
    g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
    g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);

    gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
    if (picture->output_flag) {
        picture->output_needed = TRUE;
        fs->output_needed++;
    }

    /* Both fields present: the store now represents a whole frame. */
    fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;

    /* Each field learns the other's POC; both slots must still hold the
       G_MAXINT32 "unset" sentinel before being filled in. */
    field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
        TOP_FIELD : BOTTOM_FIELD;
    g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
    fs->buffers[0]->field_poc[field] = picture->field_poc[field];
    g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
    picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
    return TRUE;
}

/* Splits the single frame picture held by @fs into two field pictures so
 * an interlaced sequence can reference each field individually.
 * Returns TRUE on success. */
static gboolean
gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
{
    GstVaapiPictureH264 * const first_field = fs->buffers[0];
    GstVaapiPictureH264 *second_field;

    g_return_val_if_fail(fs->num_buffers == 1, FALSE);

    /* Re-label the stored frame as the top field of an interlaced pair. */
    first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
    GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);

    /* Derive the complementary field, sharing the same surface. */
    second_field = gst_vaapi_picture_h264_new_field(first_field);
    if (!second_field)
        return FALSE;
    gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
    gst_vaapi_picture_unref(second_field);

    /* The second field inherits the frame's numbering and output policy. */
    second_field->frame_num    = first_field->frame_num;
    second_field->field_poc[0] = first_field->field_poc[0];
    second_field->field_poc[1] = first_field->field_poc[1];
    second_field->output_flag  = first_field->output_flag;
    if (second_field->output_flag) {
        second_field->output_needed = TRUE;
        fs->output_needed++;
    }
    return TRUE;
}

316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334
/* TRUE when the store holds a complete (progressive or paired) frame. */
static inline gboolean
gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
{
    return GST_VAAPI_PICTURE_STRUCTURE_FRAME == fs->structure;
}

/* TRUE as soon as any picture in the store is marked as a reference. */
static inline gboolean
gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
{
    guint n = fs->num_buffers;

    while (n-- > 0) {
        if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[n]))
            return TRUE;
    }
    return FALSE;
}

#define gst_vaapi_frame_store_ref(fs) \
335
    gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
336 337

#define gst_vaapi_frame_store_unref(fs) \
338
    gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
339

340 341 342
#define gst_vaapi_frame_store_replace(old_fs_p, new_fs)                 \
    gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p),    \
        (GstVaapiMiniObject *)(new_fs))
343

344 345 346 347
/* ------------------------------------------------------------------------- */
/* --- H.264 Decoder                                                     --- */
/* ------------------------------------------------------------------------- */

Gwenole Beauchesne's avatar
Gwenole Beauchesne committed
348 349 350
#define GST_VAAPI_DECODER_H264_CAST(decoder) \
    ((GstVaapiDecoderH264 *)(decoder))

351 352 353
/* Private H.264 decoder state: parser handle, the DPB and its
 * bookkeeping, reference picture lists, and the POC/frame_num
 * derivation variables named after the spec (clause 8.2). */
struct _GstVaapiDecoderH264Private {
    GstH264NalParser           *parser;
    GstVaapiPictureH264        *current_picture;
    GstVaapiParserInfoH264     *prev_slice_pi;          // previous slice header info (field pairing, etc.)
    GstVaapiFrameStore         *prev_frame;             // most recent frame store (may await 2nd field)
    GstVaapiFrameStore         *dpb[16];                // decoded picture buffer (max 16 frames, A.3.1)
    guint                       dpb_count;
    guint                       dpb_size;               // active size, from get_max_dec_frame_buffering()
    GstVaapiProfile             profile;
    GstVaapiEntrypoint          entrypoint;
    GstVaapiChromaType          chroma_type;
    GstVaapiPictureH264        *short_ref[32];          // short-term reference pictures
    guint                       short_ref_count;
    GstVaapiPictureH264        *long_ref[32];           // long-term reference pictures
    guint                       long_ref_count;
    GstVaapiPictureH264        *RefPicList0[32];        // list 0 for P/B slices
    guint                       RefPicList0_count;
    GstVaapiPictureH264        *RefPicList1[32];        // list 1 for B slices
    guint                       RefPicList1_count;
    guint                       nal_length_size;        // avcC NAL length field size (bytes)
    guint                       mb_width;
    guint                       mb_height;
    gint32                      field_poc[2];           // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
    gint32                      poc_msb;                // PicOrderCntMsb
    gint32                      poc_lsb;                // pic_order_cnt_lsb (from slice_header())
    gint32                      prev_poc_msb;           // prevPicOrderCntMsb
    gint32                      prev_poc_lsb;           // prevPicOrderCntLsb
    gint32                      frame_num_offset;       // FrameNumOffset
    gint32                      frame_num;              // frame_num (from slice_header())
    gint32                      prev_frame_num;         // prevFrameNum
    gboolean                    prev_pic_has_mmco5;     // prevMmco5Pic
    gboolean                    prev_pic_structure;     // previous picture structure
    guint                       is_opened               : 1;
    guint                       is_avcC                 : 1;    // stream is avcC (length-prefixed), not byte-stream
    guint                       got_sps                 : 1;
    guint                       got_pps                 : 1;
    guint                       has_context             : 1;    // VA context was created
    guint                       progressive_sequence    : 1;
};

391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411
/**
 * GstVaapiDecoderH264:
 *
 * A decoder based on H264.
 */
struct _GstVaapiDecoderH264 {
    /*< private >*/
    GstVaapiDecoder             parent_instance;
    GstVaapiDecoderH264Private  priv;
};

/**
 * GstVaapiDecoderH264Class:
 *
 * A decoder class based on H264.
 */
struct _GstVaapiDecoderH264Class {
    /*< private >*/
    GstVaapiDecoderClass parent_class;
};

412
static gboolean
413
exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
414

415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455
/* Get number of reference frames to use.
 *
 * Derives the maximum decoded picture buffering, in frames, from the
 * stream level (H.264 spec, Annex A, Table A-1) and the optional VUI
 * bitstream restriction parameters. The result is clamped to [1, 16]. */
static guint
get_max_dec_frame_buffering(GstH264SPS *sps)
{
    guint max_dec_frame_buffering, MaxDpbMbs, PicSizeMbs;

    /* Table A-1 - Level limits */
    switch (sps->level_idc) {
    case 10: MaxDpbMbs = 396;    break;
    case 11: MaxDpbMbs = 900;    break;
    case 12: MaxDpbMbs = 2376;   break;
    case 13: MaxDpbMbs = 2376;   break;
    case 20: MaxDpbMbs = 2376;   break;
    case 21: MaxDpbMbs = 4752;   break;
    case 22: MaxDpbMbs = 8100;   break;
    case 30: MaxDpbMbs = 8100;   break;
    case 31: MaxDpbMbs = 18000;  break;
    case 32: MaxDpbMbs = 20480;  break;
    case 40: MaxDpbMbs = 32768;  break;
    case 41: MaxDpbMbs = 32768;  break;
    case 42: MaxDpbMbs = 34816;  break;
    case 50: MaxDpbMbs = 110400; break;
    case 51: MaxDpbMbs = 184320; break;
    default:
        /* Unknown level: assume the largest defined limit instead of
           leaving MaxDpbMbs uninitialized. The previous g_assert() is
           compiled out when G_DISABLE_ASSERT is defined, which made the
           fall-through read of MaxDpbMbs undefined behavior. */
        GST_ERROR("unhandled level_idc %d, assuming level 5.1 limits",
            sps->level_idc);
        MaxDpbMbs = 184320;
        break;
    }

    PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
                  (sps->pic_height_in_map_units_minus1 + 1) *
                  (sps->frame_mbs_only_flag ? 1 : 2));
    max_dec_frame_buffering = MaxDpbMbs / PicSizeMbs;

    /* VUI parameters */
    if (sps->vui_parameters_present_flag) {
        GstH264VUIParams * const vui_params = &sps->vui_parameters;
        if (vui_params->bitstream_restriction_flag)
            max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
        else {
            switch (sps->profile_idc) {
            case 44:  // CAVLC 4:4:4 Intra profile
            case GST_H264_PROFILE_SCALABLE_HIGH:
            case GST_H264_PROFILE_HIGH:
            case GST_H264_PROFILE_HIGH10:
            case GST_H264_PROFILE_HIGH_422:
            case GST_H264_PROFILE_HIGH_444:
                /* constraint_set3_flag on these profiles signals an
                   Intra-only stream: no pictures need buffering. */
                if (sps->constraint_set3_flag)
                    max_dec_frame_buffering = 0;
                break;
            }
        }
    }

    /* Clamp to the 16-frame DPB hard limit, but never below the number
       of reference frames the stream declares (nor below 1). */
    if (max_dec_frame_buffering > 16)
        max_dec_frame_buffering = 16;
    else if (max_dec_frame_buffering < sps->num_ref_frames)
        max_dec_frame_buffering = sps->num_ref_frames;
    return MAX(1, max_dec_frame_buffering);
}

475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514
/* Removes the entry at @index in O(1) by moving the last entry into the
 * vacated slot; ordering of the remaining entries is NOT preserved. */
static void
array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
{
    gpointer * const entries = array;
    guint last = *array_length_ptr;

    g_return_if_fail(index < last);

    last--;
    if (index != last)
        entries[index] = entries[last];
    entries[last] = NULL;
    *array_length_ptr = last;
}

#if 1
/* Fast path: delegates to the O(1), order-destroying removal. */
static inline void
array_remove_index(void *array, guint *array_length_ptr, guint index)
{
    array_remove_index_fast(array, array_length_ptr, index);
}
#else
/* Order-preserving variant kept for debugging: shifts every subsequent
 * entry down by one slot (O(n)). */
static void
array_remove_index(void *array, guint *array_length_ptr, guint index)
{
    gpointer * const entries = array;
    const guint num_entries = *array_length_ptr - 1;
    guint i;

    g_return_if_fail(index <= num_entries);

    for (i = index; i < num_entries; i++)
        entries[i] = entries[i + 1];
    entries[num_entries] = NULL;
    *array_length_ptr = num_entries;
}
#endif

#define ARRAY_REMOVE_INDEX(array, index) \
    array_remove_index(array, &array##_count, index)

515 516 517
/* Removes the frame store at @index from the DPB, either preserving the
 * DPB ordering (debug mode) or via O(1) swap-with-last. */
static void
dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    const guint last = --priv->dpb_count;
    guint i;

    if (USE_STRICT_DPB_ORDERING) {
        /* Shift every following entry down to keep strict ordering. */
        for (i = index; i < last; i++)
            gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
    }
    else if (index != last) {
        /* Fast path: move the last entry into the vacated slot. */
        gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[last]);
    }
    gst_vaapi_frame_store_replace(&priv->dpb[last], NULL);
}

530 531 532 533 534 535
/* Outputs @picture for display. When the picture belongs to frame store
 * @fs, the store's pending-output counter is decremented first and the
 * actual output is deferred until every field of the store is ready; the
 * merged frame (buffers[0]) is then what gets displayed.
 * Returns TRUE on success. */
static gboolean
dpb_output(
    GstVaapiDecoderH264 *decoder,
    GstVaapiFrameStore  *fs,
    GstVaapiPictureH264 *picture
)
{
    picture->output_needed = FALSE;

    if (fs) {
        /* Still waiting for the other field of this frame store. */
        if (--fs->output_needed > 0)
            return TRUE;
        picture = fs->buffers[0];
    }
    return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
}

547 548 549
/* Drops the frame store at DPB index @i once it is neither needed for
 * output nor used for reference anymore. */
static inline void
dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiFrameStore * const fs = priv->dpb[i];

    if (!gst_vaapi_frame_store_has_reference(fs) && !fs->output_needed)
        dpb_remove_index(decoder, i);
}

557 558 559
/* "Bumping" process (C.4.5.3): finds the pending-output picture with the
 * smallest POC, outputs it, and evicts its frame store when it is fully
 * done. Returns FALSE when nothing was left to output. */
static gboolean
dpb_bump(GstVaapiDecoderH264 *decoder)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 *found_picture = NULL;
    guint i, j, found_index;
    gboolean success;

    /* Scan every stored picture for the lowest POC still to be output. */
    for (i = 0; i < priv->dpb_count; i++) {
        GstVaapiFrameStore * const fs = priv->dpb[i];
        if (!fs->output_needed)
            continue;
        for (j = 0; j < fs->num_buffers; j++) {
            GstVaapiPictureH264 * const picture = fs->buffers[j];
            if (!picture->output_needed)
                continue;
            if (!found_picture || found_picture->base.poc > picture->base.poc)
                found_picture = picture, found_index = i;
        }
    }
    if (!found_picture)
        return FALSE;

    /* found_index is guaranteed set whenever found_picture is non-NULL. */
    success = dpb_output(decoder, priv->dpb[found_index], found_picture);
    dpb_evict(decoder, found_picture, found_index);
    return success;
}

static void
586
dpb_clear(GstVaapiDecoderH264 *decoder)
587
{
588
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
589
    guint i;
590

591
    for (i = 0; i < priv->dpb_count; i++)
592
        gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
593
    priv->dpb_count = 0;
594 595

    gst_vaapi_frame_store_replace(&priv->prev_frame, NULL);
596 597 598 599 600
}

/* Outputs all pending pictures in POC order, then empties the DPB. */
static void
dpb_flush(GstVaapiDecoderH264 *decoder)
{
    while (dpb_bump(decoder))
        continue;
    dpb_clear(decoder);
}

/* Stores decoded @picture into the DPB, implementing the storage and
 * marking processes of C.4.5.1 (reference pictures) and C.4.5.2
 * (non-reference pictures). May output ("bump") older pictures to make
 * room. Returns TRUE on success. */
static gboolean
dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiFrameStore *fs;
    guint i, j;

    // Remove all unused pictures
    if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
        i = 0;
        while (i < priv->dpb_count) {
            GstVaapiFrameStore * const fs = priv->dpb[i];
            if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
                dpb_remove_index(decoder, i);
            else
                i++;
        }
    }

    // Check if picture is the second field and the first field is still in DPB
    fs = priv->prev_frame;
    if (fs && !gst_vaapi_frame_store_has_frame(fs))
        return gst_vaapi_frame_store_add(fs, picture);

    // Create new frame store, and split fields if necessary
    fs = gst_vaapi_frame_store_new(picture);
    if (!fs)
        return FALSE;
    gst_vaapi_frame_store_replace(&priv->prev_frame, fs);
    gst_vaapi_frame_store_unref(fs);

    /* Interlaced sequence with a coded frame: expose each field
       separately so it can be referenced individually. */
    if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
        if (!gst_vaapi_frame_store_split_fields(fs))
            return FALSE;
    }

    // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
    if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
        /* Reference pictures always enter the DPB; bump the oldest
           output candidates until a slot is free. */
        while (priv->dpb_count == priv->dpb_size) {
            if (!dpb_bump(decoder))
                return FALSE;
        }
        gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
        if (picture->output_flag) {
            picture->output_needed = TRUE;
            fs->output_needed++;
        }
    }

    // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
    else {
        if (!picture->output_flag)
            return TRUE;
        while (priv->dpb_count == priv->dpb_size) {
            /* Output immediately when no stored picture precedes this
               one in output (POC) order; otherwise bump to make room. */
            gboolean found_picture = FALSE;
            for (i = 0; !found_picture && i < priv->dpb_count; i++) {
                GstVaapiFrameStore * const fs = priv->dpb[i];
                if (!fs->output_needed)
                    continue;
                for (j = 0; !found_picture && j < fs->num_buffers; j++)
                    found_picture = fs->buffers[j]->output_needed &&
                        fs->buffers[j]->base.poc < picture->base.poc;
            }
            if (!found_picture)
                return dpb_output(decoder, NULL, picture);
            if (!dpb_bump(decoder))
                return FALSE;
        }
        gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
        picture->output_needed = TRUE;
        fs->output_needed++;
    }
    return TRUE;
}

681
static inline void
682 683
dpb_reset(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
{
684
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
685

686
    priv->dpb_size = get_max_dec_frame_buffering(sps);
687 688 689
    GST_DEBUG("DPB size %u", priv->dpb_size);
}

690 691 692 693 694 695 696 697 698 699 700 701 702 703 704 705 706 707 708 709 710 711 712 713 714
/* Maps a h264parser return code onto the decoder status space. */
static GstVaapiDecoderStatus
get_status(GstH264ParserResult result)
{
    switch (result) {
    case GST_H264_PARSER_OK:
        return GST_VAAPI_DECODER_STATUS_SUCCESS;
    case GST_H264_PARSER_NO_NAL_END:
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
    case GST_H264_PARSER_ERROR:
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    default:
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }
}

/* Releases all decoding state: the current picture, the cached slice
 * parser info, the whole DPB and the NAL parser. Safe to call more than
 * once. */
static void
gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;

    gst_vaapi_picture_replace(&priv->current_picture, NULL);
    gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);

    dpb_clear(decoder);

    if (priv->parser) {
        gst_h264_nal_parser_free(priv->parser);
        priv->parser = NULL;
    }
}

static gboolean
729
gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
730
{
731
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
732 733 734 735 736 737 738 739 740 741

    gst_vaapi_decoder_h264_close(decoder);

    priv->parser = gst_h264_nal_parser_new();
    if (!priv->parser)
        return FALSE;
    return TRUE;
}

/* GstVaapiDecoder vfunc: tears down all H.264 specific state. */
static void
gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
{
    GstVaapiDecoderH264 * const decoder =
        GST_VAAPI_DECODER_H264_CAST(base_decoder);

    gst_vaapi_decoder_h264_close(decoder);
}

/* GstVaapiDecoder vfunc: initializes decoder defaults before any
 * bitstream data has been seen. Returns TRUE on success. */
static gboolean
gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
{
    GstVaapiDecoderH264 * const decoder =
        GST_VAAPI_DECODER_H264_CAST(base_decoder);
    GstVaapiDecoderH264Private * const priv = &decoder->priv;

    priv->profile               = GST_VAAPI_PROFILE_UNKNOWN;
    priv->entrypoint            = GST_VAAPI_ENTRYPOINT_VLD;
    priv->chroma_type           = GST_VAAPI_CHROMA_TYPE_YUV420;
    priv->prev_pic_structure    = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
    priv->progressive_sequence  = TRUE;
    return TRUE;
}

765 766 767 768 769 770
/* Maps the bitstream profile_idc onto a VA-API profile.
 * Returns 0 for unsupported profiles. */
static guint
h264_get_profile(GstH264SPS *sps)
{
    switch (sps->profile_idc) {
    case GST_H264_PROFILE_BASELINE:
        return GST_VAAPI_PROFILE_H264_BASELINE;
    case GST_H264_PROFILE_MAIN:
        return GST_VAAPI_PROFILE_H264_MAIN;
    case GST_H264_PROFILE_HIGH:
        return GST_VAAPI_PROFILE_H264_HIGH;
    default:
        return 0;
    }
}

/* Maps chroma_format_idc onto a VA-API chroma type; 0 = unsupported.
 * 4:4:4 with separate colour planes is not supported. */
static guint
h264_get_chroma_type(GstH264SPS *sps)
{
    switch (sps->chroma_format_idc) {
    case 1:
        return GST_VAAPI_CHROMA_TYPE_YUV420;
    case 2:
        return GST_VAAPI_CHROMA_TYPE_YUV422;
    case 3:
        return sps->separate_colour_plane_flag ? 0 :
            GST_VAAPI_CHROMA_TYPE_YUV444;
    default:
        return 0;
    }
}

/* Selects the VA-API profile to use for the VA context. The stream's own
 * profile is preferred; for Main streams the hardware High-profile
 * decoder is an acceptable superset fallback. Returns
 * GST_VAAPI_PROFILE_UNKNOWN when no usable decoder entrypoint exists. */
static GstVaapiProfile
get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
    GstVaapiProfile profile, profiles[2];
    guint i, n_profiles = 0;

    profile = h264_get_profile(sps);
    if (!profile)
        return GST_VAAPI_PROFILE_UNKNOWN;

    profiles[n_profiles++] = profile;
    switch (profile) {
    case GST_VAAPI_PROFILE_H264_MAIN:
        /* High is a strict superset of Main: valid fallback decoder. */
        profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
        break;
    default:
        break;
    }

    /* If the preferred profile (profiles[0]) matches one that we already
       found, then just return it now instead of searching for it again */
    if (profiles[0] == priv->profile)
        return priv->profile;

    for (i = 0; i < n_profiles; i++) {
        if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
            return profiles[i];
    }
    return GST_VAAPI_PROFILE_UNKNOWN;
}

837 838 839
static GstVaapiDecoderStatus
ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
{
840
    GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
841
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
842 843 844 845
    GstVaapiContextInfo info;
    GstVaapiProfile profile;
    GstVaapiChromaType chroma_type;
    gboolean reset_context = FALSE;
846
    guint mb_width, mb_height;
847

848 849 850 851 852 853 854
    profile = get_profile(decoder, sps);
    if (!profile) {
        GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
    }

    if (priv->profile != profile) {
855 856
        GST_DEBUG("profile changed");
        reset_context = TRUE;
857 858
        priv->profile = profile;
    }
859

860 861 862 863
    chroma_type = h264_get_chroma_type(sps);
    if (!chroma_type || chroma_type != GST_VAAPI_CHROMA_TYPE_YUV420) {
        GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
864 865
    }

866
    if (priv->chroma_type != chroma_type) {
867
        GST_DEBUG("chroma format changed");
868 869
        reset_context     = TRUE;
        priv->chroma_type = chroma_type;
870 871
    }

872 873 874 875
    mb_width  = sps->pic_width_in_mbs_minus1 + 1;
    mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
        !sps->frame_mbs_only_flag;
    if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
876
        GST_DEBUG("size changed");
877 878 879
        reset_context   = TRUE;
        priv->mb_width  = mb_width;
        priv->mb_height = mb_height;
880 881
    }

882 883 884 885 886 887
    priv->progressive_sequence = sps->frame_mbs_only_flag;
#if 0
    /* XXX: we only output complete frames for now */
    gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
#endif

888 889 890 891 892
    gst_vaapi_decoder_set_pixel_aspect_ratio(
        base_decoder,
        sps->vui_parameters.par_n,
        sps->vui_parameters.par_d
    );
893

894 895
    if (!reset_context && priv->has_context)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;
896

897
    /* XXX: fix surface size when cropping is implemented */
898 899
    info.profile    = priv->profile;
    info.entrypoint = priv->entrypoint;
900 901
    info.width      = sps->width;
    info.height     = sps->height;
902
    info.ref_frames = get_max_dec_frame_buffering(sps);
903

904 905 906 907 908 909
    if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    priv->has_context = TRUE;

    /* Reset DPB */
    dpb_reset(decoder, sps);
910 911 912
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

913 914 915 916 917 918 919 920 921 922 923 924 925 926 927 928 929 930 931 932 933 934 935 936 937 938 939 940 941 942 943 944 945 946 947 948 949 950 951 952 953 954 955 956 957 958 959
static void
fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps)
{
    const guint8 (* const ScalingList4x4)[6][16] = &pps->scaling_lists_4x4;
    guint i, j;

    /* There are always 6 4x4 scaling lists */
    g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
    g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);

    if (sizeof(iq_matrix->ScalingList4x4[0][0]) == 1)
        memcpy(iq_matrix->ScalingList4x4, *ScalingList4x4,
               sizeof(iq_matrix->ScalingList4x4));
    else {
        for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++) {
            for (j = 0; j < G_N_ELEMENTS(iq_matrix->ScalingList4x4[i]); j++)
                iq_matrix->ScalingList4x4[i][j] = (*ScalingList4x4)[i][j];
        }
    }
}

/* Copies the 8x8 scaling lists from the PPS into the VA IQ matrix
 * buffer.  Only applies when the 8x8 transform is enabled; unless the
 * stream is 4:4:4, at most 2 of the 6 lists are meaningful. */
static void
fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps)
{
    const guint8 (* const src)[6][64] = &pps->scaling_lists_8x8;
    const GstH264SPS * const sps = pps->sequence;
    guint list, idx, num_lists;

    /* 8x8 lists are only present when the 8x8 transform is enabled */
    if (!pps->transform_8x8_mode_flag)
        return;

    g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
    g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);

    if (sizeof(iq_matrix->ScalingList8x8[0][0]) == 1) {
        /* Same element width on both sides: bulk copy */
        memcpy(iq_matrix->ScalingList8x8, *src,
               sizeof(iq_matrix->ScalingList8x8));
        return;
    }

    /* Differing element widths: convert only the lists in use
       (2 unless chroma_format_idc == 3) */
    num_lists = (sps->chroma_format_idc != 3) ? 2 : 6;
    for (list = 0; list < num_lists; list++) {
        for (idx = 0; idx < G_N_ELEMENTS(iq_matrix->ScalingList8x8[list]); idx++)
            iq_matrix->ScalingList8x8[list][idx] = (*src)[list][idx];
    }
}

960
static GstVaapiDecoderStatus
961
ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
962
{
963
    GstVaapiPicture * const base_picture = &picture->base;
964 965
    GstH264PPS * const pps = picture->pps;
    GstH264SPS * const sps = pps->sequence;
966
    VAIQMatrixBufferH264 *iq_matrix;
967

968 969 970 971 972 973 974 975 976 977 978 979
    base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
    if (!base_picture->iq_matrix) {
        GST_ERROR("failed to allocate IQ matrix");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    iq_matrix = base_picture->iq_matrix->param;

    /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
       is not large enough to hold lists for 4:4:4 */
    if (sps->chroma_format_idc == 3)
        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;

980 981
    fill_iq_matrix_4x4(iq_matrix, pps);
    fill_iq_matrix_8x8(iq_matrix, pps);
982

983 984 985
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

986
static GstVaapiDecoderStatus
987 988
decode_current_picture(GstVaapiDecoderH264 *decoder)
{
989
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
990 991 992
    GstVaapiPictureH264 * const picture = priv->current_picture;

    if (!picture)
993 994
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

995 996 997
    if (!exec_ref_pic_marking(decoder, picture))
        goto error;
    if (!dpb_add(decoder, picture))
998
        goto error;
999
    if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
1000
        goto error;
1001 1002
    if (priv->prev_frame && gst_vaapi_frame_store_has_frame(priv->prev_frame))
        gst_vaapi_picture_replace(&priv->current_picture, NULL);
1003 1004 1005
    return GST_VAAPI_DECODER_STATUS_SUCCESS;

error:
1006
    /* XXX: fix for cases where first field failed to be decoded */
1007 1008
    gst_vaapi_picture_replace(&priv->current_picture, NULL);
    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1009 1010 1011
}

static GstVaapiDecoderStatus
1012
parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1013
{
1014
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
1015 1016
    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
    GstH264SPS * const sps = &pi->data.sps;
1017 1018
    GstH264ParserResult result;

1019
    GST_DEBUG("parse SPS");
1020

1021 1022 1023 1024
    /* Variables that don't have inferred values per the H.264
       standard but that should get a default value anyway */
    sps->log2_max_pic_order_cnt_lsb_minus4 = 0;

1025
    result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1026 1027 1028
    if (result != GST_H264_PARSER_OK)
        return get_status(result);

1029
    priv->got_sps = TRUE;
1030
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
1031 1032 1033
}

static GstVaapiDecoderStatus
1034
parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1035
{
1036
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
1037 1038
    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
    GstH264PPS * const pps = &pi->data.pps;
1039 1040
    GstH264ParserResult result;

1041
    GST_DEBUG("parse PPS");
1042

1043 1044 1045 1046 1047
    /* Variables that don't have inferred values per the H.264
       standard but that should get a default value anyway */
    pps->slice_group_map_type = 0;
    pps->slice_group_change_rate_minus1 = 0;

1048
    result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1049 1050 1051
    if (result != GST_H264_PARSER_OK)
        return get_status(result);

1052
    priv->got_pps = TRUE;
1053 1054 1055 1056
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
1057
parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1058
{
1059
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
1060
    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1061 1062 1063
    GstH264SEIMessage sei;
    GstH264ParserResult result;

1064
    GST_DEBUG("parse SEI");
1065 1066

    memset(&sei, 0, sizeof(sei));
1067
    result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, &sei);
1068
    if (result != GST_H264_PARSER_OK) {
1069
        GST_WARNING("failed to parse SEI, payload type:%d", sei.payloadType);
1070
        return get_status(result);
1071
    }
1072 1073 1074 1075

    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

1076
static GstVaapiDecoderStatus
1077
parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1078
{
1079
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
1080 1081
    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1082 1083 1084 1085
    GstH264ParserResult result;

    GST_DEBUG("parse slice");

1086 1087 1088 1089 1090
    /* Variables that don't have inferred values per the H.264
       standard but that should get a default value anyway */
    slice_hdr->cabac_init_idc = 0;
    slice_hdr->direct_spatial_mv_pred_flag = 0;

1091
    result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1092 1093 1094 1095 1096 1097 1098
        slice_hdr, TRUE, TRUE);
    if (result != GST_H264_PARSER_OK)
        return get_status(result);

    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

1099 1100 1101
static GstVaapiDecoderStatus
decode_sequence_end(GstVaapiDecoderH264 *decoder)
{
1102
    GstVaapiDecoderStatus status;
1103 1104 1105

    GST_DEBUG("decode sequence-end");

1106 1107 1108 1109
    status = decode_current_picture(decoder);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;

1110
    dpb_flush(decoder);
1111
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
1112 1113 1114 1115 1116 1117 1118 1119 1120 1121
}

/* 8.2.1.1 - Decoding process for picture order count type 0 */
static void
init_picture_poc_0(
    GstVaapiDecoderH264 *decoder,
    GstVaapiPictureH264 *picture,
    GstH264SliceHdr     *slice_hdr
)
{
1122
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
1123 1124 1125
    GstH264PPS * const pps = slice_hdr->pps;
    GstH264SPS * const sps = pps->sequence;
    const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1126
    gint32 temp_poc;
1127 1128 1129

    GST_DEBUG("decode picture order count type 0");