/* GStreamer
 * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
 * Boston, MA 02111-1307, USA.
 */

#ifdef HAVE_CONFIG_H
#  include "config.h"
#endif

#include <stdio.h>
#include <string.h>

#include <gst/rtp/gstrtpbuffer.h>
#include "gstrtph264depay.h"

GST_DEBUG_CATEGORY_STATIC (rtph264depay_debug);
#define GST_CAT_DEFAULT (rtph264depay_debug)

#define DEFAULT_BYTE_STREAM	TRUE
#define DEFAULT_ACCESS_UNIT	FALSE

enum
{
  PROP_0,
  PROP_BYTE_STREAM,
  PROP_ACCESS_UNIT,
  PROP_LAST
};


/* 4-byte Annex B start code: three zero bytes followed by 0x01 */
static const guint8 sync_bytes[] = { 0, 0, 0, 1 };

static GstStaticPadTemplate gst_rtp_h264_depay_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-h264")
    );

static GstStaticPadTemplate gst_rtp_h264_depay_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) \"video\", "
        "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
        "clock-rate = (int) 90000, " "encoding-name = (string) \"H264\"")
        /** optional parameters **/
    /* "profile-level-id = (string) ANY, " */
    /* "max-mbps = (string) ANY, " */
    /* "max-fs = (string) ANY, " */
    /* "max-cpb = (string) ANY, " */
    /* "max-dpb = (string) ANY, " */
    /* "max-br = (string) ANY, " */
    /* "redundant-pic-cap = (string) { \"0\", \"1\" }, " */
    /* "sprop-parameter-sets = (string) ANY, " */
    /* "parameter-add = (string) { \"0\", \"1\" }, " */
    /* "packetization-mode = (string) { \"0\", \"1\", \"2\" }, " */
    /* "sprop-interleaving-depth = (string) ANY, " */
    /* "sprop-deint-buf-req = (string) ANY, " */
    /* "deint-buf-cap = (string) ANY, " */
    /* "sprop-init-buf-time = (string) ANY, " */
    /* "sprop-max-don-diff = (string) ANY, " */
    /* "max-rcmd-nalu-size = (string) ANY " */
    );

GST_BOILERPLATE (GstRtpH264Depay, gst_rtp_h264_depay, GstBaseRTPDepayload,
    GST_TYPE_BASE_RTP_DEPAYLOAD);

static void gst_rtp_h264_depay_finalize (GObject * object);
static void gst_rtp_h264_depay_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_rtp_h264_depay_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

static GstStateChangeReturn gst_rtp_h264_depay_change_state (GstElement *
    element, GstStateChange transition);

static GstBuffer *gst_rtp_h264_depay_process (GstBaseRTPDepayload * depayload,
    GstBuffer * buf);
static gboolean gst_rtp_h264_depay_setcaps (GstBaseRTPDepayload * filter,
    GstCaps * caps);
static gboolean gst_rtp_h264_depay_handle_event (GstBaseRTPDepayload * depay,
    GstEvent * event);

static void
gst_rtp_h264_depay_base_init (gpointer klass)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);

  gst_element_class_add_static_pad_template (element_class,
      &gst_rtp_h264_depay_src_template);
  gst_element_class_add_static_pad_template (element_class,
      &gst_rtp_h264_depay_sink_template);

  gst_element_class_set_details_simple (element_class, "RTP H264 depayloader",
      "Codec/Depayloader/Network/RTP",
      "Extracts H264 video from RTP packets (RFC 3984)",
      "Wim Taymans <wim.taymans@gmail.com>");
}

static void
gst_rtp_h264_depay_class_init (GstRtpH264DepayClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;
  GstBaseRTPDepayloadClass *gstbasertpdepayload_class;

  gobject_class = (GObjectClass *) klass;
  gstelement_class = (GstElementClass *) klass;
  gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;

  gobject_class->finalize = gst_rtp_h264_depay_finalize;

  gobject_class->set_property = gst_rtp_h264_depay_set_property;
  gobject_class->get_property = gst_rtp_h264_depay_get_property;

  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_BYTE_STREAM,
      g_param_spec_boolean ("byte-stream", "Byte Stream",
          "Generate byte stream format of NALU (deprecated; use caps)",
          DEFAULT_BYTE_STREAM, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_ACCESS_UNIT,
      g_param_spec_boolean ("access-unit", "Access Unit",
          "Merge NALU into AU (picture) (deprecated; use caps)",
          DEFAULT_ACCESS_UNIT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gstelement_class->change_state = gst_rtp_h264_depay_change_state;

  gstbasertpdepayload_class->process = gst_rtp_h264_depay_process;
  gstbasertpdepayload_class->set_caps = gst_rtp_h264_depay_setcaps;
  gstbasertpdepayload_class->handle_event = gst_rtp_h264_depay_handle_event;

  GST_DEBUG_CATEGORY_INIT (rtph264depay_debug, "rtph264depay", 0,
      "H264 Video RTP Depayloader");
}

static void
gst_rtp_h264_depay_init (GstRtpH264Depay * rtph264depay,
    GstRtpH264DepayClass * klass)
{
  rtph264depay->adapter = gst_adapter_new ();
  rtph264depay->picture_adapter = gst_adapter_new ();
  rtph264depay->byte_stream = DEFAULT_BYTE_STREAM;
  rtph264depay->merge = DEFAULT_ACCESS_UNIT;
}

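/* Drop any partially assembled data so depayloading restarts cleanly;
 * called on flush and when going to PAUSED. */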
static void
gst_rtp_h264_depay_reset (GstRtpH264Depay * rtph264depay)
{
  gst_adapter_clear (rtph264depay->adapter);
  rtph264depay->wait_start = TRUE;
  gst_adapter_clear (rtph264depay->picture_adapter);
  rtph264depay->picture_start = FALSE;
  rtph264depay->last_keyframe = FALSE;
  rtph264depay->last_ts = 0;
  rtph264depay->current_fu_type = 0;
}

static void
gst_rtp_h264_depay_finalize (GObject * object)
{
  GstRtpH264Depay *rtph264depay;

  rtph264depay = GST_RTP_H264_DEPAY (object);

  if (rtph264depay->codec_data)
    gst_buffer_unref (rtph264depay->codec_data);

  g_object_unref (rtph264depay->adapter);
  g_object_unref (rtph264depay->picture_adapter);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}

static void
gst_rtp_h264_depay_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstRtpH264Depay *rtph264depay;

  rtph264depay = GST_RTP_H264_DEPAY (object);

  switch (prop_id) {
    case PROP_BYTE_STREAM:
      rtph264depay->byte_stream = g_value_get_boolean (value);
      break;
    case PROP_ACCESS_UNIT:
      rtph264depay->merge = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_rtp_h264_depay_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstRtpH264Depay *rtph264depay;

  rtph264depay = GST_RTP_H264_DEPAY (object);

  switch (prop_id) {
    case PROP_BYTE_STREAM:
      g_value_set_boolean (value, rtph264depay->byte_stream);
      break;
    case PROP_ACCESS_UNIT:
      g_value_set_boolean (value, rtph264depay->merge);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

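/* Look at the allowed downstream caps and derive the preferred output
 * stream-format (avc/byte-stream) and alignment (au/nal), overriding the
 * deprecated properties when downstream is explicit about what it wants. */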
static void
gst_rtp_h264_depay_negotiate (GstRtpH264Depay * rtph264depay)
{
  GstCaps *caps;
  gint byte_stream = -1;
  gint merge = -1;

  caps =
      gst_pad_get_allowed_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (rtph264depay));

  GST_DEBUG_OBJECT (rtph264depay, "allowed caps: %" GST_PTR_FORMAT, caps);

  if (caps) {
    if (gst_caps_get_size (caps) > 0) {
      GstStructure *s = gst_caps_get_structure (caps, 0);
      const gchar *str = NULL;

      if ((str = gst_structure_get_string (s, "stream-format"))) {
        if (strcmp (str, "avc") == 0) {
          byte_stream = FALSE;
        } else if (strcmp (str, "byte-stream") == 0) {
          byte_stream = TRUE;
        } else {
          GST_DEBUG_OBJECT (rtph264depay, "unknown stream-format: %s", str);
        }
      }

      if ((str = gst_structure_get_string (s, "alignment"))) {
        if (strcmp (str, "au") == 0) {
          merge = TRUE;
        } else if (strcmp (str, "nal") == 0) {
          merge = FALSE;
        } else {
          GST_DEBUG_OBJECT (rtph264depay, "unknown alignment: %s", str);
        }
      }
    }
    gst_caps_unref (caps);
  }

  if (byte_stream >= 0) {
    GST_DEBUG_OBJECT (rtph264depay, "downstream requires byte-stream %d",
        byte_stream);
    if (rtph264depay->byte_stream != byte_stream) {
      GST_WARNING_OBJECT (rtph264depay,
          "overriding property setting based on caps");
      rtph264depay->byte_stream = byte_stream;
    }
  }
  if (merge >= 0) {
    GST_DEBUG_OBJECT (rtph264depay, "downstream requires merge %d", merge);
    if (rtph264depay->merge != merge) {
      GST_WARNING_OBJECT (rtph264depay,
          "overriding property setting based on caps");
      rtph264depay->merge = merge;
    }
  }
}

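/* Parse the RTP caps: take the clock-rate, decode the base64
 * sprop-parameter-sets into either byte-stream config NALs or an avcC
 * codec_data blob, and set the resulting video/x-h264 caps downstream. */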
static gboolean
gst_rtp_h264_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
{
  GstCaps *srccaps;
  gint clock_rate;
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  GstRtpH264Depay *rtph264depay;
  const gchar *ps, *profile;
  GstBuffer *codec_data;
  guint8 *b64;
  gboolean res;

  rtph264depay = GST_RTP_H264_DEPAY (depayload);

  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
    clock_rate = 90000;
  depayload->clock_rate = clock_rate;

  srccaps = gst_caps_new_simple ("video/x-h264", NULL);

  /* Base64 encoded, comma separated config NALs */
  ps = gst_structure_get_string (structure, "sprop-parameter-sets");
  /* hex: AVCProfileIndication:8 | profile_compat:8 | AVCLevelIndication:8 */
  profile = gst_structure_get_string (structure, "profile-level-id");

  /* negotiate with downstream w.r.t. output format and alignment */
  gst_rtp_h264_depay_negotiate (rtph264depay);

  if (rtph264depay->byte_stream && ps != NULL) {
    /* for byte-stream output we only need the parameter sets, but we don't
     * error out when they are missing; we assume they are in the stream. */
    gchar **params;
    guint len, total;
    gint i;

    params = g_strsplit (ps, ",", 0);

    /* count the total number of base64 bytes; also account for the sync bytes
     * we place in front of each param. */
    len = 0;
    for (i = 0; params[i]; i++) {
      len += strlen (params[i]);
      len += sizeof (sync_bytes);
    }
    /* we seriously overshoot the length, but it's fine. */
    codec_data = gst_buffer_new_and_alloc (len);
    b64 = GST_BUFFER_DATA (codec_data);
    total = 0;
    for (i = 0; params[i]; i++) {
      guint save = 0;
      gint state = 0;

      GST_DEBUG_OBJECT (depayload, "decoding param %d (%s)", i, params[i]);
      memcpy (b64, sync_bytes, sizeof (sync_bytes));
      b64 += sizeof (sync_bytes);
      len =
          g_base64_decode_step (params[i], strlen (params[i]), b64, &state,
          &save);
      GST_DEBUG_OBJECT (depayload, "decoded %d bytes", len);
      total += len + sizeof (sync_bytes);
      b64 += len;
    }
    GST_BUFFER_SIZE (codec_data) = total;
    g_strfreev (params);

    /* keep the codec_data, we need to send it as the first buffer. We cannot
     * push it in the adapter because the adapter might be flushed on discont.
     */
    if (rtph264depay->codec_data)
      gst_buffer_unref (rtph264depay->codec_data);
    rtph264depay->codec_data = codec_data;
  } else if (!rtph264depay->byte_stream) {
    gchar **params;
    guint8 **sps, **pps;
    guint len, num_sps, num_pps;
    gint i;
    guint8 *data;

    if (ps == NULL)
      goto incomplete_caps;

    params = g_strsplit (ps, ",", 0);
    len = g_strv_length (params);

    GST_DEBUG_OBJECT (depayload, "we have %d params", len);

    sps = g_new0 (guint8 *, len + 1);
    pps = g_new0 (guint8 *, len + 1);
    num_sps = num_pps = 0;

    /* start with the 7 bytes of fixed avcC header fields */
    len = 7;
    for (i = 0; params[i]; i++) {
      gsize nal_len;
      guint8 *nalp;
      guint save = 0;
      gint state = 0;

      nal_len = strlen (params[i]);
      nalp = g_malloc (nal_len + 2);

      nal_len =
          g_base64_decode_step (params[i], nal_len, nalp + 2, &state, &save);
      nalp[0] = (nal_len >> 8) & 0xff;
      nalp[1] = nal_len & 0xff;
      len += nal_len + 2;

      /* copy to the right list */
      if ((nalp[2] & 0x1f) == 7) {
        GST_DEBUG_OBJECT (depayload, "adding param %d as SPS %d", i, num_sps);
        sps[num_sps++] = nalp;
      } else {
        GST_DEBUG_OBJECT (depayload, "adding param %d as PPS %d", i, num_pps);
        pps[num_pps++] = nalp;
      }
    }
    g_strfreev (params);

    if (num_sps == 0 || (GST_READ_UINT16_BE (sps[0]) < 3) || num_pps == 0) {
      g_strfreev ((gchar **) pps);
      g_strfreev ((gchar **) sps);
      goto incomplete_caps;
    }

    codec_data = gst_buffer_new_and_alloc (len);
    data = GST_BUFFER_DATA (codec_data);

    /* 8 bits version == 1 */
    *data++ = 1;
    if (profile) {
      guint32 profile_id;

      /* hex: AVCProfileIndication:8 | profile_compat:8 | AVCLevelIndication:8 */
      sscanf (profile, "%6x", &profile_id);
      *data++ = (profile_id >> 16) & 0xff;
      *data++ = (profile_id >> 8) & 0xff;
      *data++ = profile_id & 0xff;
    } else {
      /* extract from SPS */
      *data++ = sps[0][3];
      *data++ = sps[0][4];
      *data++ = sps[0][5];
    }
    /* 6 bits reserved | 2 bits lengthSizeMinusOne */
    *data++ = 0xff;
    /* 3 bits reserved | 5 bits numOfSequenceParameterSets */
    *data++ = 0xe0 | (num_sps & 0x1f);

    /* copy all SPS */
    for (i = 0; sps[i]; i++) {
      len = ((sps[i][0] << 8) | sps[i][1]) + 2;
      GST_DEBUG_OBJECT (depayload, "copy SPS %d of length %d", i, len);
      memcpy (data, sps[i], len);
      g_free (sps[i]);
      data += len;
    }
    g_free (sps);
    /* 8 bits numOfPictureParameterSets */
    *data++ = num_pps;
    /* copy all PPS */
    for (i = 0; pps[i]; i++) {
      len = ((pps[i][0] << 8) | pps[i][1]) + 2;
      GST_DEBUG_OBJECT (depayload, "copy PPS %d of length %d", i, len);
      memcpy (data, pps[i], len);
      g_free (pps[i]);
      data += len;
    }
    g_free (pps);
    GST_BUFFER_SIZE (codec_data) = data - GST_BUFFER_DATA (codec_data);

    gst_caps_set_simple (srccaps,
        "codec_data", GST_TYPE_BUFFER, codec_data, NULL);
    gst_buffer_unref (codec_data);
  }

  gst_caps_set_simple (srccaps, "stream-format", G_TYPE_STRING,
      rtph264depay->byte_stream ? "byte-stream" : "avc",
      "alignment", G_TYPE_STRING, rtph264depay->merge ? "au" : "nal", NULL);

  res = gst_pad_set_caps (depayload->srcpad, srccaps);
  gst_caps_unref (srccaps);

  return res;

  /* ERRORS */
incomplete_caps:
  {
    GST_DEBUG_OBJECT (depayload, "we have incomplete caps");
    gst_caps_unref (srccaps);
    return FALSE;
  }
}

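/* Take everything gathered for the current picture out of the picture
 * adapter and return it as one access unit, along with the timestamp and
 * keyframe flag recorded while collecting it. */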
static GstBuffer *
gst_rtp_h264_complete_au (GstRtpH264Depay * rtph264depay,
    GstClockTime * out_timestamp, gboolean * out_keyframe)
{
  guint outsize;
  GstBuffer *outbuf;

  /* we had a picture in the adapter and we completed it */
  GST_DEBUG_OBJECT (rtph264depay, "taking completed AU");
  outsize = gst_adapter_available (rtph264depay->picture_adapter);
  outbuf = gst_adapter_take_buffer (rtph264depay->picture_adapter, outsize);

  *out_timestamp = rtph264depay->last_ts;
  *out_keyframe = rtph264depay->last_keyframe;

  rtph264depay->last_keyframe = FALSE;
  rtph264depay->picture_start = FALSE;

  return outbuf;
}

/* SPS/PPS/IDR considered key, all others DELTA;
 * so downstream waiting for keyframe can pick up at SPS/PPS/IDR */
#define NAL_TYPE_IS_KEY(nt) (((nt) == 5) || ((nt) == 7) || ((nt) == 8))

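/* Handle one complete NAL unit (already prefixed with a start code or AVC
 * length field): either collect it in the picture adapter until the access
 * unit is complete, or output it directly in nal-aligned mode; codec_data is
 * prepended to the first output buffer and the delta-unit flag is updated
 * from the NAL type. */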
static GstBuffer *
gst_rtp_h264_depay_handle_nal (GstRtpH264Depay * rtph264depay, GstBuffer * nal,
    GstClockTime in_timestamp, gboolean marker)
{
  GstBaseRTPDepayload *depayload = GST_BASE_RTP_DEPAYLOAD (rtph264depay);
  gint nal_type;
  guint size;
  guint8 *data;
  GstBuffer *outbuf = NULL;
  GstClockTime out_timestamp;
  gboolean keyframe, out_keyframe;

  size = GST_BUFFER_SIZE (nal);
  if (G_UNLIKELY (size < 5))
    goto short_nal;

  data = GST_BUFFER_DATA (nal);

  nal_type = data[4] & 0x1f;
  GST_DEBUG_OBJECT (rtph264depay, "handle NAL type %d", nal_type);

  keyframe = NAL_TYPE_IS_KEY (nal_type);

  out_keyframe = keyframe;
  out_timestamp = in_timestamp;

  if (rtph264depay->merge) {
    gboolean start = FALSE, complete = FALSE;

    /* consider a coded slice (IDR or not) to start a picture (and so end the
     * previous one) if first_mb_in_slice == 0; a non-zero value belongs to
     * the previous picture */
    /* NOTE this is not entirely according to the Access Unit rules in
     * 7.4.1.2.4, but in practice it works for sane streams, needs very little
     * parsing, and also copes with broken frame_num in the NAL (where the
     * spec-accurate approach would fail) */
    if (nal_type == 1 || nal_type == 2 || nal_type == 5) {
      /* we have a picture start */
      start = TRUE;
      if (data[5] & 0x80) {
        /* first_mb_in_slice == 0 completes a picture */
        complete = TRUE;
      }
    } else if (nal_type >= 6 && nal_type <= 9) {
      /* SEI, SPS, PPS, AU terminate picture */
      complete = TRUE;
    }
    GST_DEBUG_OBJECT (depayload, "start %d, complete %d", start, complete);

    if (complete && rtph264depay->picture_start)
      outbuf = gst_rtp_h264_complete_au (rtph264depay, &out_timestamp,
          &out_keyframe);

    /* add to adapter */
    GST_DEBUG_OBJECT (depayload, "adding NAL to picture adapter");
    gst_adapter_push (rtph264depay->picture_adapter, nal);
    rtph264depay->last_ts = in_timestamp;
    rtph264depay->last_keyframe |= keyframe;
    rtph264depay->picture_start |= start;

    if (marker)
      outbuf = gst_rtp_h264_complete_au (rtph264depay, &out_timestamp,
          &out_keyframe);
  } else {
    /* no merge, output is input nal */
    GST_DEBUG_OBJECT (depayload, "using NAL as output");
    outbuf = nal;
  }

  if (outbuf) {
    /* prepend codec_data */
    if (rtph264depay->codec_data) {
      GST_DEBUG_OBJECT (depayload, "prepending codec_data");
      outbuf = gst_buffer_join (rtph264depay->codec_data, outbuf);
      rtph264depay->codec_data = NULL;
      out_keyframe = TRUE;
    }
    outbuf = gst_buffer_make_metadata_writable (outbuf);

    GST_BUFFER_TIMESTAMP (outbuf) = out_timestamp;

    if (out_keyframe)
      GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
    else
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);

    gst_buffer_set_caps (outbuf, GST_PAD_CAPS (depayload->srcpad));
  }

  return outbuf;

  /* ERRORS */
short_nal:
  {
    GST_WARNING_OBJECT (depayload, "dropping short NAL");
    gst_buffer_unref (nal);
    return NULL;
  }
}

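/* Flush the fragmentation unit adapter: rewrite the placeholder prefix as a
 * start code (byte-stream) or big-endian NAL size (avc) and hand the
 * reassembled NAL to gst_rtp_h264_depay_handle_nal, pushing it immediately
 * when send is TRUE. */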
static GstBuffer *
gst_rtp_h264_push_fragmentation_unit (GstRtpH264Depay * rtph264depay,
    gboolean send)
{
  guint outsize;
  guint8 *outdata;
  GstBuffer *outbuf;

  outsize = gst_adapter_available (rtph264depay->adapter);
  outbuf = gst_adapter_take_buffer (rtph264depay->adapter, outsize);
  outdata = GST_BUFFER_DATA (outbuf);

  GST_DEBUG_OBJECT (rtph264depay, "output %d bytes", outsize);

  if (rtph264depay->byte_stream) {
    memcpy (outdata, sync_bytes, sizeof (sync_bytes));
  } else {
    outsize -= 4;
    outdata[0] = (outsize >> 24);
    outdata[1] = (outsize >> 16);
    outdata[2] = (outsize >> 8);
    outdata[3] = (outsize);
  }

  rtph264depay->current_fu_type = 0;

  if (send) {
    outbuf = gst_rtp_h264_depay_handle_nal (rtph264depay, outbuf,
        rtph264depay->fu_timestamp, rtph264depay->fu_marker);
    if (outbuf)
      gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtph264depay),
          outbuf);
    return NULL;
  } else {
    return gst_rtp_h264_depay_handle_nal (rtph264depay, outbuf,
        rtph264depay->fu_timestamp, rtph264depay->fu_marker);
  }
}

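/* Main depayload function: classify the payload by NAL unit type and handle
 * single NAL unit packets, STAP-A/STAP-B aggregation packets and FU-A/FU-B
 * fragmentation units (RFC 3984 section 5). */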
static GstBuffer *
gst_rtp_h264_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
  GstRtpH264Depay *rtph264depay;
  GstBuffer *outbuf = NULL;
  guint8 nal_unit_type;

  rtph264depay = GST_RTP_H264_DEPAY (depayload);

  /* flush remaining data on discont */
  if (GST_BUFFER_IS_DISCONT (buf)) {
    gst_adapter_clear (rtph264depay->adapter);
    rtph264depay->wait_start = TRUE;
    rtph264depay->current_fu_type = 0;
  }

  {
    gint payload_len;
    guint8 *payload;
    guint header_len;
    guint8 nal_ref_idc;
    guint8 *outdata;
    guint outsize, nalu_size;
    GstClockTime timestamp;
    gboolean marker;

    timestamp = GST_BUFFER_TIMESTAMP (buf);
    payload_len = gst_rtp_buffer_get_payload_len (buf);
    payload = gst_rtp_buffer_get_payload (buf);
    marker = gst_rtp_buffer_get_marker (buf);

    GST_DEBUG_OBJECT (rtph264depay, "receiving %d bytes", payload_len);

    if (payload_len == 0)
      return NULL;

    /* +---------------+
     * |0|1|2|3|4|5|6|7|
     * +-+-+-+-+-+-+-+-+
     * |F|NRI|  Type   |
     * +---------------+
     *
     * F must be 0.
     */
    nal_ref_idc = (payload[0] & 0x60) >> 5;
    nal_unit_type = payload[0] & 0x1f;

    /* at least one byte header with type */
    header_len = 1;

    GST_DEBUG_OBJECT (rtph264depay, "NRI %d, Type %d", nal_ref_idc,
        nal_unit_type);

    /* If an FU was being processed but the current NAL is of a different
     * type, assume that the remote payloader is buggy (it didn't set the end
     * bit when the FU ended) and send out what we gathered thus far */
    if (G_UNLIKELY (rtph264depay->current_fu_type != 0 &&
            nal_unit_type != rtph264depay->current_fu_type))
      gst_rtp_h264_push_fragmentation_unit (rtph264depay, TRUE);

    switch (nal_unit_type) {
      case 0:
      case 30:
      case 31:
        /* undefined */
        goto undefined_type;
      case 25:
        /* STAP-B    Single-time aggregation packet     5.7.1 */
        /* 2 byte extra header for DON */
        header_len += 2;
        /* fallthrough */
      case 24:
      {
        /* strip headers */
        payload += header_len;
        payload_len -= header_len;

        rtph264depay->wait_start = FALSE;


        /* STAP-A    Single-time aggregation packet     5.7.1 */
        while (payload_len > 2) {
          /*                      1          
           *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 
           * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
           * |         NALU Size             |
           * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
           */
          nalu_size = (payload[0] << 8) | payload[1];

          /* don't include nalu_size */
          if (nalu_size > (payload_len - 2))
            nalu_size = payload_len - 2;

          outsize = nalu_size + sizeof (sync_bytes);
          outbuf = gst_buffer_new_and_alloc (outsize);
          outdata = GST_BUFFER_DATA (outbuf);
          if (rtph264depay->byte_stream) {
            memcpy (outdata, sync_bytes, sizeof (sync_bytes));
          } else {
            outdata[0] = outdata[1] = 0;
            outdata[2] = payload[0];
            outdata[3] = payload[1];
          }

          /* strip NALU size */
          payload += 2;
          payload_len -= 2;

          outdata += sizeof (sync_bytes);
          memcpy (outdata, payload, nalu_size);

          gst_adapter_push (rtph264depay->adapter, outbuf);

          payload += nalu_size;
          payload_len -= nalu_size;
        }

        outsize = gst_adapter_available (rtph264depay->adapter);
        outbuf = gst_adapter_take_buffer (rtph264depay->adapter, outsize);

        outbuf = gst_rtp_h264_depay_handle_nal (rtph264depay, outbuf, timestamp,
            marker);
        break;
      }
      case 26:
        /* MTAP16    Multi-time aggregation packet      5.7.2 */
        header_len = 5;
        /* fallthrough, not implemented */
      case 27:
        /* MTAP24    Multi-time aggregation packet      5.7.2 */
        header_len = 6;
        goto not_implemented;
        break;
      case 28:
      case 29:
      {
        /* FU-A      Fragmentation unit                 5.8 */
        /* FU-B      Fragmentation unit                 5.8 */
        gboolean S, E;

        /* +---------------+
         * |0|1|2|3|4|5|6|7|
         * +-+-+-+-+-+-+-+-+
         * |S|E|R|  Type   |
         * +---------------+
         *
         * R is reserved and always 0
         */
        S = (payload[1] & 0x80) == 0x80;
        E = (payload[1] & 0x40) == 0x40;

        GST_DEBUG_OBJECT (rtph264depay, "S %d, E %d", S, E);

        if (rtph264depay->wait_start && !S)
          goto waiting_start;

        if (S) {
          /* NAL unit starts here */
          guint8 nal_header;

          /* If a new FU starts while we are still processing an older one,
           * assume that the remote payloader is buggy (it doesn't set the end
           * bit) and send out what we've gathered thus far */
          if (G_UNLIKELY (rtph264depay->current_fu_type != 0))
            gst_rtp_h264_push_fragmentation_unit (rtph264depay, TRUE);

          rtph264depay->current_fu_type = nal_unit_type;
          rtph264depay->fu_timestamp = timestamp;

          rtph264depay->wait_start = FALSE;

          /* reconstruct NAL header */
          nal_header = (payload[0] & 0xe0) | (payload[1] & 0x1f);

          /* strip the FU indicator byte but keep the FU header byte; it is
           * overwritten below with the reconstructed NAL header. */
          payload += 1;
          payload_len -= 1;

          nalu_size = payload_len;
          outsize = nalu_size + sizeof (sync_bytes);
          outbuf = gst_buffer_new_and_alloc (outsize);
          outdata = GST_BUFFER_DATA (outbuf);
          outdata += sizeof (sync_bytes);
          memcpy (outdata, payload, nalu_size);
          outdata[0] = nal_header;

          GST_DEBUG_OBJECT (rtph264depay, "queueing %d bytes", outsize);

          /* and assemble in the adapter */
          gst_adapter_push (rtph264depay->adapter, outbuf);
        } else {
          /* strip off FU indicator and FU header bytes */
          payload += 2;
          payload_len -= 2;

          outsize = payload_len;
          outbuf = gst_buffer_new_and_alloc (outsize);
          outdata = GST_BUFFER_DATA (outbuf);
          memcpy (outdata, payload, outsize);

          GST_DEBUG_OBJECT (rtph264depay, "queueing %d bytes", outsize);

          /* and assemble in the adapter */
          gst_adapter_push (rtph264depay->adapter, outbuf);
        }

        outbuf = NULL;
        rtph264depay->fu_marker = marker;

        /* if NAL unit ends, flush the adapter */
        if (E)
          outbuf = gst_rtp_h264_push_fragmentation_unit (rtph264depay, FALSE);
        break;
      }
      default:
      {
        rtph264depay->wait_start = FALSE;

        /* 1-23   NAL unit  Single NAL unit packet per H.264   5.6 */
        /* the entire payload is the output buffer */
        nalu_size = payload_len;
        outsize = nalu_size + sizeof (sync_bytes);
        outbuf = gst_buffer_new_and_alloc (outsize);
        outdata = GST_BUFFER_DATA (outbuf);
        if (rtph264depay->byte_stream) {
          memcpy (outdata, sync_bytes, sizeof (sync_bytes));
        } else {
          outdata[0] = outdata[1] = 0;
          outdata[2] = nalu_size >> 8;
          outdata[3] = nalu_size & 0xff;
        }
        outdata += sizeof (sync_bytes);
        memcpy (outdata, payload, nalu_size);

        outbuf = gst_rtp_h264_depay_handle_nal (rtph264depay, outbuf, timestamp,
            marker);
        break;
      }
    }
  }

  return outbuf;

  /* ERRORS */
undefined_type:
  {
    GST_ELEMENT_WARNING (rtph264depay, STREAM, DECODE,
        (NULL), ("Undefined packet type"));
    return NULL;
  }
waiting_start:
  {
    GST_DEBUG_OBJECT (rtph264depay, "waiting for start");
    return NULL;
  }
not_implemented:
  {
    GST_ELEMENT_ERROR (rtph264depay, STREAM, FORMAT,
        (NULL), ("NAL unit type %d not supported yet", nal_unit_type));
    return NULL;
  }
}

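/* Reset the depayloader on flush so stale partial data is not combined with
 * packets that arrive after the flush. */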
static gboolean
gst_rtp_h264_depay_handle_event (GstBaseRTPDepayload * depay, GstEvent * event)
{
  GstRtpH264Depay *rtph264depay;

  rtph264depay = GST_RTP_H264_DEPAY (depay);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_STOP:
      gst_rtp_h264_depay_reset (rtph264depay);
      break;
    default:
      break;
  }

  return
      GST_BASE_RTP_DEPAYLOAD_CLASS (parent_class)->handle_event (depay, event);
}

932
933
934
935
936
937
938
939
940
941
942
943
944
static GstStateChangeReturn
gst_rtp_h264_depay_change_state (GstElement * element,
    GstStateChange transition)
{
  GstRtpH264Depay *rtph264depay;
  GstStateChangeReturn ret;

  rtph264depay = GST_RTP_H264_DEPAY (element);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      gst_rtp_h264_depay_reset (rtph264depay);
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_NULL:
      break;
    default:
      break;
  }
  return ret;
}

gboolean
gst_rtp_h264_depay_plugin_init (GstPlugin * plugin)
{
  return gst_element_register (plugin, "rtph264depay",
      GST_RANK_SECONDARY, GST_TYPE_RTP_H264_DEPAY);
}