...
 
Commits (16)
This source diff could not be displayed because it is too large. You can view the blob instead.
......@@ -91,5 +91,6 @@ foreach plugin_name: list_plugin_res.stdout().split(':')
disable_incremental_build: true,
gst_cache_file: plugins_cache,
gst_plugin_name: plugin_name,
include_paths: join_paths(meson.current_source_dir(), '..'),
)]
endforeach
......@@ -18,7 +18,7 @@ if dv_dep.found()
install_dir : plugins_install_dir,
)
pkgconfig.generate(gstdv, install_dir : plugins_pkgconfig_install_dir)
plugins += [gstdv]
# FIXME
#executable('smpte_test',
# 'smpte_test.c', 'gstsmptetimecode.c',
......
......@@ -48,7 +48,7 @@
* This element is meant for easy no-hassle video snapshotting. It is not
* suitable for video playback or video display at high framerates. Use
* ximagesink, xvimagesink or some other suitable video sink in connection
* with the #GstXOverlay interface instead if you want to do video playback.
* with the #GstVideoOverlay interface instead if you want to do video playback.
*
* ## Message details
*
......@@ -60,7 +60,7 @@
*
* * `pixbuf`: the #GdkPixbuf object
* * `pixel-aspect-ratio`: the pixel aspect ratio (PAR) of the input image
* (this field contains a #GstFraction); the
* (this field contains a value of type #GST_TYPE_FRACTION); the
* PAR is usually 1:1 for images, but is often something non-1:1 in the case
* of video input. In this case the image may be distorted and you may need
* to rescale it accordingly before saving it to file or displaying it. This
......
......@@ -53,5 +53,6 @@ if gtk_dep.found()
install_dir : plugins_install_dir,
)
pkgconfig.generate(gstgtk, install_dir : plugins_pkgconfig_install_dir)
plugins += [gstgtk]
endif
......@@ -30,16 +30,16 @@
* consideration. See <ulink url="http://www.vorbis.com/">Ogg/Vorbis</ulink>
* for a royalty free (and often higher quality) alternative.
*
* <refsect2>
* <title>Output sample rate</title>
* ## Output sample rate
*
* If no fixed output sample rate is negotiated on the element's src pad,
* the element will choose an optimal sample rate to resample to internally.
* For example, a 16-bit 44.1 KHz mono audio stream encoded at 48 kbit will
* get resampled to 32 KHz. Use filter caps on the src pad to force a
* particular sample rate.
* </refsect2>
* <refsect2>
* <title>Example pipelines</title>
*
* ## Example pipelines
*
* |[
* gst-launch-1.0 -v audiotestsrc wave=sine num-buffers=100 ! audioconvert ! lamemp3enc ! filesink location=sine.mp3
* ]| Encode a test sine signal to MP3.
......@@ -55,7 +55,6 @@
* |[
* gst-launch-1.0 -v audiotestsrc num-buffers=10 ! audio/x-raw,rate=44100,channels=1 ! lamemp3enc target=bitrate cbr=true bitrate=48 ! filesink location=test.mp3
* ]| Encode to a fixed sample rate
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
......
......@@ -22,12 +22,11 @@
*
* Audio decoder for MPEG-1 layer 1/2/3 audio data.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
*
* |[
* gst-launch-1.0 filesrc location=music.mp3 ! mpegaudioparse ! mpg123audiodec ! audioconvert ! audioresample ! autoaudiosink
* ]| Decode and play the mp3 file
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
......
......@@ -160,6 +160,7 @@ if have_cxx and build_gstgl
install: true,
install_dir : plugins_install_dir)
pkgconfig.generate(gstqmlgl, install_dir : plugins_pkgconfig_install_dir)
plugins += [gstqmlgl]
endif
endif
endif
......@@ -31,7 +31,7 @@
* If the server is not an Icecast server, it will behave as if the
* #GstSoupHTTPSrc:iradio-mode property were not set. If it is, souphttpsrc will
* output data with a media type of application/x-icy, in which case you will
* need to use the #ICYDemux element as follow-up element to extract the Icecast
* need to use the #GstICYDemux element as follow-up element to extract the Icecast
* metadata and to determine the underlying media type.
*
* ## Example launch line
......
......@@ -30,8 +30,8 @@
* Tags sent by upstream elements will be picked up automatically (and merged
* according to the merge mode set via the tag setter interface).
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
*
* |[
* gst-launch-1.0 -v filesrc location=foo.ogg ! decodebin ! audioconvert ! lame ! apev2mux ! filesink location=foo.mp3
* ]| A pipeline that transcodes a file from Ogg/Vorbis to mp3 format with an
......@@ -40,7 +40,6 @@
* |[
* gst-launch-1.0 -m filesrc location=foo.mp3 ! apedemux ! fakesink silent=TRUE 2&gt; /dev/null | grep taglist
* ]| Verify that tags have been written.
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
......
......@@ -31,8 +31,8 @@
* Tags sent by upstream elements will be picked up automatically (and merged
* according to the merge mode set via the tag setter interface).
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
*
* |[
* gst-launch-1.0 -v filesrc location=foo.ogg ! decodebin ! audioconvert ! lame ! id3v2mux ! filesink location=foo.mp3
* ]| A pipeline that transcodes a file from Ogg/Vorbis to mp3 format with an
......@@ -41,7 +41,6 @@
* |[
* gst-launch-1.0 -m filesrc location=foo.mp3 ! id3demux ! fakesink silent=TRUE 2&gt; /dev/null | grep taglist
* ]| Verify that tags have been written.
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
......
......@@ -30,8 +30,8 @@
*
* This element encodes raw integer audio into an MPEG-1 layer 2 (MP2) stream.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
*
* |[
* gst-launch-1.0 -v audiotestsrc wave=sine num-buffers=100 ! audioconvert ! twolame ! filesink location=sine.mp2
* ]| Encode a test sine signal to MP2.
......@@ -44,7 +44,6 @@
* |[
* gst-launch-1.0 -v cdda://5 ! audioconvert ! twolame bitrate=192 ! filesink location=track5.mp2
* ]| Encode Audio CD track 5 to MP2
* </refsect2>
*
*/
......
......@@ -30,10 +30,10 @@
* </ulink>. It's the successor of On2 VP3, which was the base of the
* Theora video codec.
*
* To control the quality of the encoding, the #GstVP8Enc::target-bitrate,
* #GstVP8Enc::min-quantizer, #GstVP8Enc::max-quantizer or #GstVP8Enc::cq-level
* To control the quality of the encoding, the #GstVP8Enc:target-bitrate,
* #GstVP8Enc:min-quantizer, #GstVP8Enc:max-quantizer or #GstVP8Enc:cq-level
* properties can be used. Which one is used depends on the mode selected by
* the #GstVP8Enc::end-usage property.
* the #GstVP8Enc:end-usage property.
* See <ulink url="http://www.webmproject.org/docs/encoder-parameters/">Encoder Parameters</ulink>
* for explanation, examples for useful encoding parameters and more details
* on the encoding parameters.
......
......@@ -30,10 +30,10 @@
* </ulink>. It's the successor of On2 VP3, which was the base of the
* Theora video codec.
*
* To control the quality of the encoding, the #GstVP9Enc::target-bitrate,
* #GstVP9Enc::min-quantizer, #GstVP9Enc::max-quantizer or #GstVP9Enc::cq-level
* To control the quality of the encoding, the #GstVP9Enc:target-bitrate,
* #GstVP9Enc:min-quantizer, #GstVP9Enc:max-quantizer or #GstVP9Enc:cq-level
* properties can be used. Which one is used depends on the mode selected by
* the #GstVP9Enc::end-usage property.
* the #GstVP9Enc:end-usage property.
* See <ulink url="http://www.webmproject.org/docs/encoder-parameters/">Encoder Parameters</ulink>
* for explanation, examples for useful encoding parameters and more details
* on the encoding parameters.
......
......@@ -25,7 +25,7 @@
*
* autoaudiosink is an audio sink that automatically detects an appropriate
* audio sink to use. It does so by scanning the registry for all elements
* that have <quote>Sink</quote> and <quote>Audio</quote> in the class field
* that have "Sink" and "Audio" in the class field
* of their element information, and also have a non-zero autoplugging rank.
*
* ## Example launch line
......
......@@ -26,7 +26,7 @@
*
* autoaudiosrc is an audio source that automatically detects an appropriate
* audio source to use. It does so by scanning the registry for all elements
* that have <quote>Source</quote> and <quote>Audio</quote> in the class field
* that have "Source" and "Audio" in the class field
* of their element information, and also have a non-zero autoplugging rank.
*
* ## Example launch line
......
......@@ -25,7 +25,7 @@
*
* autovideosink is a video sink that automatically detects an appropriate
* video sink to use. It does so by scanning the registry for all elements
* that have <quote>Sink</quote> and <quote>Video</quote> in the class field
* that have "Sink" and "Video" in the class field
* of their element information, and also have a non-zero autoplugging rank.
*
* ## Example launch line
......
......@@ -26,7 +26,7 @@
*
* autovideosrc is a video src that automatically detects an appropriate
* video source to use. It does so by scanning the registry for all elements
* that have <quote>Source</quote> and <quote>Video</quote> in the class field
* that have "Source" and "Video" in the class field
* of their element information, and also have a non-zero autoplugging rank.
*
* ## Example launch line
......
......@@ -37,64 +37,22 @@
* structure of name "dtmf-event" with fields set according to the following
* table:
*
* <informaltable>
* <tgroup cols='4'>
* <colspec colname='Name' />
* <colspec colname='Type' />
* <colspec colname='Possible values' />
* <colspec colname='Purpose' />
* <thead>
* <row>
* <entry>Name</entry>
* <entry>GType</entry>
* <entry>Possible values</entry>
* <entry>Purpose</entry>
* </row>
* </thead>
* <tbody>
* <row>
* <entry>type</entry>
* <entry>G_TYPE_INT</entry>
* <entry>0-1</entry>
* <entry>The application uses this field to specify which of the two methods
* specified in RFC 2833 to use. The value should be 0 for tones and 1 for
* named events. Tones are specified by their frequencies and events are specied
* by their number. This element can only take events as input. Do not confuse
* with "method" which specified the output.
* </entry>
* </row>
* <row>
* <entry>number</entry>
* <entry>G_TYPE_INT</entry>
* <entry>0-15</entry>
* <entry>The event number.</entry>
* </row>
* <row>
* <entry>volume</entry>
* <entry>G_TYPE_INT</entry>
* <entry>0-36</entry>
* <entry>This field describes the power level of the tone, expressed in dBm0
* after dropping the sign. Power levels range from 0 to -63 dBm0. The range of
* valid DTMF is from 0 to -36 dBm0. Can be omitted if start is set to FALSE.
* </entry>
* </row>
* <row>
* <entry>start</entry>
* <entry>G_TYPE_BOOLEAN</entry>
* <entry>True or False</entry>
* <entry>Whether the event is starting or ending.</entry>
* </row>
* <row>
* <entry>method</entry>
* <entry>G_TYPE_INT</entry>
* <entry>2</entry>
* <entry>The method used for sending event, this element will react if this
* field is absent or 2.
* </entry>
* </row>
* </tbody>
* </tgroup>
* </informaltable>
* * `type` (G_TYPE_INT, 0-1): The application uses this field to specify which of the two methods
* specified in RFC 2833 to use. The value should be 0 for tones and 1 for
* named events. Tones are specified by their frequencies and events are specified
* by their number. This element can only take events as input. Do not confuse
* with "method" which specifies the output.
*
* * `number` (G_TYPE_INT, 0-15): The event number.
*
* * `volume` (G_TYPE_INT, 0-36): This field describes the power level of the tone, expressed in dBm0
* after dropping the sign. Power levels range from 0 to -63 dBm0. The range of
* valid DTMF is from 0 to -36 dBm0. Can be omitted if start is set to FALSE.
*
* * `start` (G_TYPE_BOOLEAN, True or False): Whether the event is starting or ending.
*
* * `method` (G_TYPE_INT, 2): The method used for sending event, this element will react if this
* field is absent or 2.
*
* For example, the following code informs the pipeline (and in turn, the
* DTMFSrc element inside the pipeline) about the start of a DTMF named
......
......@@ -27,58 +27,21 @@
* This element takes RTP DTMF packets and produces sound. It also emits a
* message on the #GstBus.
*
* The message is called "dtmf-event" and has the following fields
* <informaltable>
* <tgroup cols='4'>
* <colspec colname='Name' />
* <colspec colname='Type' />
* <colspec colname='Possible values' />
* <colspec colname='Purpose' />
* <thead>
* <row>
* <entry>Name</entry>
* <entry>GType</entry>
* <entry>Possible values</entry>
* <entry>Purpose</entry>
* </row>
* </thead>
* <tbody>
* <row>
* <entry>type</entry>
* <entry>G_TYPE_INT</entry>
* <entry>0-1</entry>
* <entry>Which of the two methods
* specified in RFC 2833 to use. The value should be 0 for tones and 1 for
* named events. Tones are specified by their frequencies and events are specied
* by their number. This element currently only recognizes events.
* Do not confuse with "method" which specified the output.
* </entry>
* </row>
* <row>
* <entry>number</entry>
* <entry>G_TYPE_INT</entry>
* <entry>0-16</entry>
* <entry>The event number.</entry>
* </row>
* <row>
* <entry>volume</entry>
* <entry>G_TYPE_INT</entry>
* <entry>0-36</entry>
* <entry>This field describes the power level of the tone, expressed in dBm0
* after dropping the sign. Power levels range from 0 to -63 dBm0. The range of
* valid DTMF is from 0 to -36 dBm0.
* </entry>
* </row>
* <row>
* <entry>method</entry>
* <entry>G_TYPE_INT</entry>
* <entry>1</entry>
* <entry>This field will always been 1 (ie RTP event) from this element.
* </entry>
* </row>
* </tbody>
* </tgroup>
* </informaltable>
* The message is called "dtmf-event" and has the following fields:
*
* * `type` (G_TYPE_INT, 0-1): Which of the two methods
* specified in RFC 2833 to use. The value should be 0 for tones and 1 for
* named events. Tones are specified by their frequencies and events are specified
* by their number. This element currently only recognizes events.
* Do not confuse with "method" which specifies the output.
*
* * `number` (G_TYPE_INT, 0-16): The event number.
*
* * `volume` (G_TYPE_INT, 0-36): This field describes the power level of the tone, expressed in dBm0
* after dropping the sign. Power levels range from 0 to -63 dBm0. The range of
* valid DTMF is from 0 to -36 dBm0.
*
* * `method` (G_TYPE_INT, 1): This field will always be 1 (i.e. an RTP event) from this element.
*/
#ifdef HAVE_CONFIG_H
......
......@@ -35,64 +35,22 @@
* structure of name "dtmf-event" with fields set according to the following
* table:
*
* <informaltable>
* <tgroup cols='4'>
* <colspec colname='Name' />
* <colspec colname='Type' />
* <colspec colname='Possible values' />
* <colspec colname='Purpose' />
* <thead>
* <row>
* <entry>Name</entry>
* <entry>GType</entry>
* <entry>Possible values</entry>
* <entry>Purpose</entry>
* </row>
* </thead>
* <tbody>
* <row>
* <entry>type</entry>
* <entry>G_TYPE_INT</entry>
* <entry>0-1</entry>
* <entry>The application uses this field to specify which of the two methods
* specified in RFC 2833 to use. The value should be 0 for tones and 1 for
* named events. Tones are specified by their frequencies and events are specied
* by their number. This element can only take events as input. Do not confuse
* with "method" which specified the output.
* </entry>
* </row>
* <row>
* <entry>number</entry>
* <entry>G_TYPE_INT</entry>
* <entry>0-15</entry>
* <entry>The event number.</entry>
* </row>
* <row>
* <entry>volume</entry>
* <entry>G_TYPE_INT</entry>
* <entry>0-36</entry>
* <entry>This field describes the power level of the tone, expressed in dBm0
* after dropping the sign. Power levels range from 0 to -63 dBm0. The range of
* valid DTMF is from 0 to -36 dBm0. Can be omitted if start is set to FALSE.
* </entry>
* </row>
* <row>
* <entry>start</entry>
* <entry>G_TYPE_BOOLEAN</entry>
* <entry>True or False</entry>
* <entry>Whether the event is starting or ending.</entry>
* </row>
* <row>
* <entry>method</entry>
* <entry>G_TYPE_INT</entry>
* <entry>1</entry>
* <entry>The method used for sending event, this element will react if this
* field is absent or 1.
* </entry>
* </row>
* </tbody>
* </tgroup>
* </informaltable>
* * `type` (G_TYPE_INT, 0-1): The application uses this field to specify which of the two methods
* specified in RFC 2833 to use. The value should be 0 for tones and 1 for
* named events. Tones are specified by their frequencies and events are specified
* by their number. This element can only take events as input. Do not confuse
* with "method" which specifies the output.
*
* * `number` (G_TYPE_INT, 0-15): The event number.
*
* * `volume` (G_TYPE_INT, 0-36): This field describes the power level of the tone, expressed in dBm0
* after dropping the sign. Power levels range from 0 to -63 dBm0. The range of
* valid DTMF is from 0 to -36 dBm0. Can be omitted if start is set to FALSE.
*
* * `start` (G_TYPE_BOOLEAN, True or False): Whether the event is starting or ending.
*
* * `method` (G_TYPE_INT, 1): The method used for sending event, this element will react if this
* field is absent or 1.
*
* For example, the following code informs the pipeline (and in turn, the
* RTPDTMFSrc element inside the pipeline) about the start of an RTP DTMF named
......
......@@ -58,6 +58,7 @@ G_BEGIN_DECLS
#define FOURCC_MAC6 GST_MAKE_FOURCC('M','A','C','6')
#define FOURCC_MP4V GST_MAKE_FOURCC('M','P','4','V')
#define FOURCC_PICT GST_MAKE_FOURCC('P','I','C','T')
#define FOURCC_pict GST_MAKE_FOURCC('p','i','c','t')
#define FOURCC_QDM2 GST_MAKE_FOURCC('Q','D','M','2')
#define FOURCC_SVQ3 GST_MAKE_FOURCC('S','V','Q','3')
#define FOURCC_VP31 GST_MAKE_FOURCC('V','P','3','1')
......@@ -391,6 +392,21 @@ G_BEGIN_DECLS
#define FOURCC_tenc GST_MAKE_FOURCC('t','e','n','c')
#define FOURCC_cenc GST_MAKE_FOURCC('c','e','n','c')
/* child atoms of meta */
#define FOURCC_pitm GST_MAKE_FOURCC('p','i','t','m')
#define FOURCC_iloc GST_MAKE_FOURCC('i','l','o','c')
#define FOURCC_iinf GST_MAKE_FOURCC('i','i','n','f')
#define FOURCC_infe GST_MAKE_FOURCC('i','n','f','e')
#define FOURCC_iref GST_MAKE_FOURCC('i','r','e','f')
/* High Efficiency Image File Format (HEIF) */
#define FOURCC_mif1 GST_MAKE_FOURCC('m','i','f','1')
#define FOURCC_msf1 GST_MAKE_FOURCC('m','s','f','1')
#define FOURCC_iprp GST_MAKE_FOURCC('i','p','r','p')
#define FOURCC_ipco GST_MAKE_FOURCC('i','p','c','o')
#define FOURCC_ispe GST_MAKE_FOURCC('i','s','p','e')
#define FOURCC_ipma GST_MAKE_FOURCC('i','p','m','a')
G_END_DECLS
#endif /* __FOURCC_H__ */
......@@ -66,20 +66,20 @@
* The fragmented file features defined (only) in ISO Base Media are used by
* ISMV files making up (a.o.) Smooth Streaming (ismlmux).
*
* A few properties (#GstMp4Mux:movie-timescale, #GstMp4Mux:trak-timescale)
* A few properties (#GstMP4Mux:movie-timescale, #GstMP4Mux:trak-timescale)
* allow adjusting some technical parameters, which might be useful in (rare)
* cases to resolve compatibility issues in some situations.
*
* Some other properties influence the result more fundamentally.
* A typical mov/mp4 file's metadata (aka moov) is located at the end of the
* file, somewhat contrary to this usually being called "the header".
* However, a #GstMp4Mux:faststart file will (with some effort) arrange this to
* However, a #GstMP4Mux:faststart file will (with some effort) arrange this to
* be located near start of the file, which then allows it e.g. to be played
* while downloading. Alternatively, rather than having one chunk of metadata at
* start (or end), there can be some metadata at start and most of the other
* data can be spread out into fragments of #GstMp4Mux:fragment-duration.
* data can be spread out into fragments of #GstMP4Mux:fragment-duration.
* If such fragmented layout is intended for streaming purposes, then
* #GstMp4Mux:streamable allows foregoing to add index metadata (at the end of
* #GstMP4Mux:streamable allows foregoing to add index metadata (at the end of
* file).
*
* ## Example pipelines
......
......@@ -90,12 +90,12 @@
* is interrupted uncleanly by a crash. Robust muxing mode requires a seekable
* output, such as filesink, because it needs to rewrite the start of the file.
*
* To enable robust muxing mode, set the #GstQTMux::reserved-moov-update-period
* and #GstQTMux::reserved-max-duration property. Also present is the
* #GstQTMux::reserved-bytes-per-sec property, which can be increased if
* To enable robust muxing mode, set the #GstQTMux:reserved-moov-update-period
* and #GstQTMux:reserved-max-duration property. Also present is the
* #GstQTMux:reserved-bytes-per-sec property, which can be increased if
* for some reason the default is not large enough and the initial reserved
* space for headers is too small. Applications can monitor the
* #GstQTMux::reserved-duration-remaining property to see how close to full
* #GstQTMux:reserved-duration-remaining property to see how close to full
* the reserved space is becoming.
*
* Applications that wish to be able to use/edit a file while it is being
......@@ -104,7 +104,7 @@
* completely valid header from the start for all tracks (i.e. it appears as
* though the file is "reserved-max-duration" long with all samples
* present). This mode can be enabled by setting the
* #GstQTMux::reserved-moov-update-period and #GstQTMux::reserved-prefill
* #GstQTMux:reserved-moov-update-period and #GstQTMux:reserved-prefill
* properties. Note that this mode is only possible with input streams that have
* a fixed sample size (such as raw audio and Prores Video) and that don't
* have reordered samples.
......
This diff is collapsed.
......@@ -57,6 +57,13 @@ enum QtDemuxState
QTDEMUX_STATE_BUFFER_MDAT /* Buffering the mdat atom */
};
enum QtDemuxHeaderState
{
QTDEMUX_HEADER_NONE, /* Initial state (haven't got moov or initial meta yet) */
QTDEMUX_HEADER_MOOV, /* Got moov and it's parsed */
QTDEMUX_HEADER_INIT /* Got init. data via caps or 'meta' atom */
};
struct _GstQTDemux {
GstElement element;
......@@ -97,11 +104,16 @@ struct _GstQTDemux {
* to a temporary variable ? */
GNode *moov_node;
/* top-level [meta] header */
GNode *meta_node;
GstBuffer *metabuffer;
/* FIXME : This is never freed. It is only assigned once. memleak ? */
GNode *moov_node_compressed;
/* Set to TRUE when the [moov] header has been fully parsed */
gboolean got_moov;
/* Current initial header state. Depending on specification, qtdemux can
* be initialized via moov, caps, or some other way */
enum QtDemuxHeaderState header_state;
/* Global timescale for the incoming stream. Use the QTTIME macros
* to convert values to/from GstClockTime */
......
This diff is collapsed.
......@@ -91,6 +91,18 @@ gboolean qtdemux_dump_fLaC (GstQTDemux * qtdemux, GstByteReader * data,
int depth);
gboolean qtdemux_dump_gmin (GstQTDemux * qtdemux, GstByteReader * data,
int depth);
gboolean qtdemux_dump_pitm (GstQTDemux * qtdemux, GstByteReader * data,
int depth);
gboolean qtdemux_dump_iloc (GstQTDemux * qtdemux, GstByteReader * data,
int depth);
gboolean qtdemux_dump_iinf (GstQTDemux * qtdemux, GstByteReader * data,
int depth);
gboolean qtdemux_dump_infe (GstQTDemux * qtdemux, GstByteReader * data,
int depth);
gboolean qtdemux_dump_ispe (GstQTDemux * qtdemux, GstByteReader * data,
int depth);
gboolean qtdemux_dump_ipma (GstQTDemux * qtdemux, GstByteReader * data,
int depth);
gboolean qtdemux_node_dump (GstQTDemux * qtdemux, GNode * node);
......
......@@ -220,6 +220,15 @@ static const QtNodeType qt_node_types[] = {
{FOURCC_av1m, "AV1 Multi-Frame sample group entry", 0},
{FOURCC_av1s, "AV1 S-Frame sample group entry", 0},
{FOURCC_av1M, "AV1 Metadata sample group entry", 0},
{FOURCC_pitm, "primary item", 0, qtdemux_dump_pitm},
{FOURCC_iloc, "item location", 0, qtdemux_dump_iloc},
{FOURCC_iinf, "item information", 0, qtdemux_dump_iinf},
{FOURCC_infe, "item information entry", 0, qtdemux_dump_infe},
{FOURCC_iref, "item reference", 0},
{FOURCC_iprp, "item properties", QT_FLAG_CONTAINER},
{FOURCC_ipco, "item property container", QT_FLAG_CONTAINER},
{FOURCC_ispe, "item spatial extents", 0, qtdemux_dump_ispe},
{FOURCC_ipma, "item property association", 0, qtdemux_dump_ipma},
{0, "unknown", 0,},
};
......
......@@ -45,9 +45,7 @@
*
* ## Example application
*
* <informalexample><programlisting language="C">
* <xi:include xmlns:xi="http://www.w3.org/2003/XInclude" parse="text" href="../../../../tests/examples/level/level-example.c" />
* </programlisting></informalexample>
* {{ tests/examples/level/level-example.c }}
*
*/
......
......@@ -659,7 +659,6 @@ gst_matroska_demux_parse_colour (GstMatroskaDemux * demux, GstEbmlRead * ebml,
* GstVideoTransferFunction, but functionally same as "1: BT709" */
case 1:
case 6:
case 14:
colorimetry.transfer = GST_VIDEO_TRANSFER_BT709;
break;
case 2:
......@@ -686,9 +685,18 @@ gst_matroska_demux_parse_colour (GstMatroskaDemux * demux, GstEbmlRead * ebml,
case 13:
colorimetry.transfer = GST_VIDEO_TRANSFER_SRGB;
break;
case 14:
colorimetry.transfer = GST_VIDEO_TRANSFER_BT2020_10;
break;
case 15:
colorimetry.transfer = GST_VIDEO_TRANSFER_BT2020_12;
break;
case 16:
colorimetry.transfer = GST_VIDEO_TRANSFER_SMPTE2084;
break;
case 18:
colorimetry.transfer = GST_VIDEO_TRANSFER_BT2020_12;
break;
default:
GST_FIXME_OBJECT (demux,
"Unsupported color transfer characteristics %"
......
......@@ -72,7 +72,8 @@ enum
PROP_STREAMABLE,
PROP_TIMECODESCALE,
PROP_MIN_CLUSTER_DURATION,
PROP_MAX_CLUSTER_DURATION
PROP_MAX_CLUSTER_DURATION,
PROP_OFFSET_TO_ZERO,
};
#define DEFAULT_DOCTYPE_VERSION 2
......@@ -82,6 +83,7 @@ enum
#define DEFAULT_TIMECODESCALE GST_MSECOND
#define DEFAULT_MIN_CLUSTER_DURATION 500 * GST_MSECOND
#define DEFAULT_MAX_CLUSTER_DURATION 65535 * GST_MSECOND
#define DEFAULT_OFFSET_TO_ZERO FALSE
/* WAVEFORMATEX is gst_riff_strf_auds + an extra guint16 extension size */
#define WAVEFORMATEX_SIZE (2 + sizeof (gst_riff_strf_auds))
......@@ -363,6 +365,10 @@ gst_matroska_mux_class_init (GstMatroskaMuxClass * klass)
"0 means no maximum duration.", 0,
G_MAXINT64, DEFAULT_MAX_CLUSTER_DURATION,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_OFFSET_TO_ZERO,
g_param_spec_boolean ("offset-to-zero", "Offset To Zero",
"Offsets all streams so that the " "earliest stream starts at 0.",
DEFAULT_OFFSET_TO_ZERO, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_matroska_mux_change_state);
......@@ -2595,6 +2601,9 @@ gst_matroska_mux_write_colour (GstMatroskaMux * mux,
case GST_VIDEO_COLOR_MATRIX_BT2020:
matrix_id = 9;
break;
default:
GST_FIXME_OBJECT (mux, "Unhandled color matrix %d", matrix_id);
break;
}
switch (videocontext->colorimetry.range) {
......@@ -2641,9 +2650,22 @@ gst_matroska_mux_write_colour (GstMatroskaMux * mux,
case GST_VIDEO_TRANSFER_SRGB:
transfer_id = 13;
break;
case GST_VIDEO_TRANSFER_BT2020_10:
transfer_id = 14;
break;
case GST_VIDEO_TRANSFER_BT2020_12:
transfer_id = 15;
break;
case GST_VIDEO_TRANSFER_SMPTE2084:
transfer_id = 16;
break;
case GST_VIDEO_TRANSFER_ARIB_STD_B67:
transfer_id = 18;
break;
default:
GST_FIXME_OBJECT (mux,
"Unhandled transfer characteristic %d", transfer_id);
break;
}
switch (videocontext->colorimetry.primaries) {
......@@ -2685,6 +2707,9 @@ gst_matroska_mux_write_colour (GstMatroskaMux * mux,
case GST_VIDEO_COLOR_PRIMARIES_EBU3213:
primaries_id = 22;
break;
default:
GST_FIXME_OBJECT (mux, "Unhandled color primaries %d", primaries_id);
break;
}
gst_ebml_write_uint (ebml, GST_MATROSKA_ID_VIDEORANGE, range_id);
......@@ -3048,6 +3073,7 @@ gst_matroska_mux_start (GstMatroskaMux * mux, GstMatroskaPad * first_pad,
GSList *collected;
int i;
guint tracknum = 1;
GstClockTime earliest_time = GST_CLOCK_TIME_NONE;
GstClockTime duration = 0;
guint32 segment_uid[4];
GTimeVal time = { 0, 0 };
......@@ -3207,6 +3233,29 @@ gst_matroska_mux_start (GstMatroskaMux * mux, GstMatroskaPad * first_pad,
if (collect_pad->track->codec_id == NULL)
continue;
/* Find the smallest timestamp so we can offset all streams by this to
* start at 0 */
if (mux->offset_to_zero) {
GstClockTime ts;
if (collect_pad == first_pad)
buf = first_pad_buf ? gst_buffer_ref (first_pad_buf) : NULL;
else
buf = gst_collect_pads_peek (mux->collect, collected->data);
if (buf) {
ts = gst_matroska_track_get_buffer_timestamp (collect_pad->track, buf);
if (earliest_time == GST_CLOCK_TIME_NONE)
earliest_time = ts;
else if (ts != GST_CLOCK_TIME_NONE && ts < earliest_time)
earliest_time = ts;
}
if (buf)
gst_buffer_unref (buf);
}
/* For audio tracks, use the first buffers duration as the default
* duration if we didn't get any better idea from the caps event already
*/
......@@ -3235,6 +3284,8 @@ gst_matroska_mux_start (GstMatroskaMux * mux, GstMatroskaPad * first_pad,
}
gst_ebml_write_master_finish (ebml, master);
mux->earliest_time = earliest_time == GST_CLOCK_TIME_NONE ? 0 : earliest_time;
/* chapters */
toc = gst_toc_setter_get_toc (GST_TOC_SETTER (mux));
if (toc != NULL && !mux->ebml_write->streamable) {
......@@ -3891,6 +3942,11 @@ gst_matroska_mux_write_data (GstMatroskaMux * mux, GstMatroskaPad * collect_pad,
buffer_timestamp =
gst_matroska_track_get_buffer_timestamp (collect_pad->track, buf);
if (buffer_timestamp >= mux->earliest_time) {
buffer_timestamp -= mux->earliest_time;
} else {
buffer_timestamp = 0;
}
/* hm, invalid timestamp (due to --to be fixed--- element upstream);
* this would wreak havoc with time stored in matroska file */
......@@ -4169,6 +4225,14 @@ gst_matroska_mux_handle_buffer (GstCollectPads * pads, GstCollectData * data,
g_assert (buf);
buffer_timestamp = gst_matroska_track_get_buffer_timestamp (best->track, buf);
if (buffer_timestamp >= mux->earliest_time) {
buffer_timestamp -= mux->earliest_time;
} else {
GST_ERROR_OBJECT (mux,
"PTS before first PTS (%" GST_TIME_FORMAT " < %" GST_TIME_FORMAT ")",
GST_TIME_ARGS (buffer_timestamp), GST_TIME_ARGS (mux->earliest_time));
buffer_timestamp = 0;
}
GST_DEBUG_OBJECT (best->collect.pad, "best pad - buffer ts %"
GST_TIME_FORMAT " dur %" GST_TIME_FORMAT,
......@@ -4285,6 +4349,9 @@ gst_matroska_mux_set_property (GObject * object,
case PROP_MAX_CLUSTER_DURATION:
mux->max_cluster_duration = g_value_get_int64 (value);
break;
case PROP_OFFSET_TO_ZERO:
mux->offset_to_zero = g_value_get_boolean (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
......@@ -4322,6 +4389,9 @@ gst_matroska_mux_get_property (GObject * object,
case PROP_MAX_CLUSTER_DURATION:
g_value_set_int64 (value, mux->max_cluster_duration);
break;
case PROP_OFFSET_TO_ZERO:
g_value_set_boolean (value, mux->offset_to_zero);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
......
......@@ -111,6 +111,9 @@ struct _GstMatroskaMux {
guint64 max_cluster_duration;
guint64 min_cluster_duration;
/* earliest timestamp (time, ns) if offsetting to zero */
gboolean offset_to_zero;
guint64 earliest_time;
/* length, position (time, ns) */
guint64 duration;
......
......@@ -29,7 +29,7 @@
* after the first picture. We also need a videorate element to set timestamps
* on all buffers after the first one in accordance with the framerate.
*
* File names are created by replacing "\%d" with the index using printf().
* File names are created by replacing "\%d" with the index using `printf()`.
*
* ## Example launch line
* |[
......
......@@ -2026,10 +2026,12 @@ handle_gathered_gop (GstSplitMuxSink * splitmux)
/* Check for overrun - have we output at least one byte and overrun
* either threshold? */
if (need_new_fragment (splitmux, queued_time, queued_gop_time, queued_bytes)) {
GstClockTime *sink_running_time = g_new (GstClockTime, 1);
*sink_running_time = splitmux->reference_ctx->out_running_time;
g_object_set_qdata_full (G_OBJECT (splitmux->sink),
RUNNING_TIME, sink_running_time, g_free);
if (splitmux->async_finalize) {
GstClockTime *sink_running_time = g_new (GstClockTime, 1);
*sink_running_time = splitmux->reference_ctx->out_running_time;
g_object_set_qdata_full (G_OBJECT (splitmux->sink),
RUNNING_TIME, sink_running_time, g_free);
}
g_atomic_int_set (&(splitmux->do_split_next_gop), FALSE);
/* Tell the output side to start a new fragment */
GST_INFO_OBJECT (splitmux,
......
......@@ -25,13 +25,12 @@
* Extract raw audio from RTP packets according to RFC 3551.
* For detailed information see: http://www.rfc-editor.org/rfc/rfc3551.txt
*
* <refsect2>
* <title>Example pipeline</title>
* ## Example pipeline
*
* |[
* gst-launch udpsrc caps='application/x-rtp, media=(string)audio, clock-rate=(int)44100, encoding-name=(string)L8, encoding-params=(string)1, channels=(int)1, payload=(int)96' ! rtpL8depay ! pulsesink
* ]| This example pipeline will depayload an RTP raw audio stream. Refer to
* the rtpL8pay example to create the RTP stream.
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
......
......@@ -25,13 +25,12 @@
* Payload raw audio into RTP packets according to RFC 3551.
* For detailed information see: http://www.rfc-editor.org/rfc/rfc3551.txt
*
* <refsect2>
* <title>Example pipeline</title>
* ## Example pipeline
*
* |[
* gst-launch -v audiotestsrc ! audioconvert ! rtpL8pay ! udpsink
* ]| This example pipeline will payload raw audio. Refer to
* the rtpL8depay example to depayload and play the RTP stream.
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
......
......@@ -241,6 +241,7 @@ gst_rtp_mp4g_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
"mpegversion", G_TYPE_INT, 4, "stream-format", G_TYPE_STRING, "raw",
NULL);
rtpmp4gdepay->check_adts = TRUE;
rtpmp4gdepay->warn_adts = TRUE;
} else if (strcmp (str, "video") == 0) {
srccaps = gst_caps_new_simple ("video/mpeg",
"mpegversion", G_TYPE_INT, 4,
......@@ -681,13 +682,17 @@ gst_rtp_mp4g_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
0xfffe0000, 0xfff00000, 0, 4, &v) == 0) {
guint adts_hdr_len = (((v >> 16) & 0x1) == 0) ? 9 : 7;
if (avail > adts_hdr_len) {
GST_WARNING_OBJECT (rtpmp4gdepay, "Detected ADTS header of "
"%u bytes, skipping", adts_hdr_len);
if (rtpmp4gdepay->warn_adts) {
GST_WARNING_OBJECT (rtpmp4gdepay, "Detected ADTS header of "
"%u bytes, skipping", adts_hdr_len);
rtpmp4gdepay->warn_adts = FALSE;
}
gst_adapter_flush (rtpmp4gdepay->adapter, adts_hdr_len);
avail -= adts_hdr_len;
}
} else {
rtpmp4gdepay->check_adts = FALSE;
rtpmp4gdepay->warn_adts = TRUE;
}
}
......
......@@ -68,6 +68,7 @@ struct _GstRtpMP4GDepay
guint prev_AU_num;
gboolean check_adts; /* check for ADTS headers */
gboolean warn_adts; /* warn about ADTS headers */
GQueue *packets;
......
......@@ -36,12 +36,11 @@
* When using #GstRtpBin, this element should be inserted through the
* #GstRtpBin::request-aux-receiver signal.
*
* <refsect2>
* <title>Example pipeline</title>
* ## Example pipeline
*
* |[
* gst-launch-1.0 udpsrc port=8888 caps="application/x-rtp, payload=96, clock-rate=90000" ! rtpreddec pt=122 ! rtpstorage size-time=220000000 ! rtpssrcdemux ! application/x-rtp, payload=96, clock-rate=90000, media=video, encoding-name=H264 ! rtpjitterbuffer do-lost=1 latency=200 ! rtpulpfecdec pt=122 ! rtph264depay ! avdec_h264 ! videoconvert ! autovideosink
* ]| This example will receive a stream with RED and ULP FEC and try to reconstruct the packets.
* </refsect2>
*
* See also: #GstRtpRedEnc, #GstWebRTCBin, #GstRtpBin
* Since: 1.14
......
......@@ -38,12 +38,11 @@
* When using #GstRtpBin, this element should be inserted through the
* #GstRtpBin::request-fec-encoder signal.
*
* <refsect2>
* <title>Example pipeline</title>
* ## Example pipeline
*
* |[
* gst-launch-1.0 videotestsrc ! x264enc ! video/x-h264, profile=baseline ! rtph264pay pt=96 ! rtpulpfecenc percentage=100 pt=122 ! rtpredenc pt=122 distance=2 ! identity drop-probability=0.05 ! udpsink port=8888
* ]| This example will send a stream with RED and ULP FEC.
* </refsect2>
*
* See also: #GstRtpRedDec, #GstWebRTCBin, #GstRtpBin
* Since: 1.14
......
......@@ -44,18 +44,16 @@
* When using #GstRtpBin, this element should be inserted through the
* #GstRtpBin::request-fec-decoder signal.
*
* <refsect2>
* <title>Example pipeline</title>
* ## Example pipeline
*
* |[
* gst-launch-1.0 udpsrc port=8888 caps="application/x-rtp, payload=96, clock-rate=90000" ! rtpstorage size-time=220000000 ! rtpssrcdemux ! application/x-rtp, payload=96, clock-rate=90000, media=video, encoding-name=H264 ! rtpjitterbuffer do-lost=1 latency=200 ! rtpulpfecdec pt=122 ! rtph264depay ! avdec_h264 ! videoconvert ! autovideosink
* ]| This example will receive a stream with FEC and try to reconstruct the packets.
*
* Example programs are available at
* <ulink url="https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/blob/master/examples/src/bin/rtpfecserver.rs">rtpfecserver.rs</ulink>
* <https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/blob/master/examples/src/bin/rtpfecserver.rs>
* and
* <ulink url="https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/blob/master/examples/src/bin/rtpfecclient.rs">rtpfecclient.rs</ulink>
*
* </refsect2>
* <https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/blob/master/examples/src/bin/rtpfecclient.rs>
*
* See also: #GstRtpUlpFecEnc, #GstRtpBin, #GstRtpStorage
* Since: 1.14
......
......@@ -69,18 +69,16 @@
* When using #GstRtpBin, this element should be inserted through the
* #GstRtpBin::request-fec-encoder signal.
*
* ## Example pipeline
*
* <refsect2>
* <title>Example pipeline</title>
* |[
* gst-launch-1.0 videotestsrc ! x264enc ! video/x-h264, profile=baseline ! rtph264pay pt=96 ! rtpulpfecenc percentage=100 pt=122 ! udpsink port=8888
* ]| This example will receive a stream with FEC and try to reconstruct the packets.
*
* Example programs are available at
* <ulink url="https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/blob/master/examples/src/bin/rtpfecserver.rs">rtpfecserver.rs</ulink>
* <https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/blob/master/examples/src/bin/rtpfecserver.rs>
* and
* <ulink url="https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/blob/master/examples/src/bin/rtpfecclient.rs">rtpfecclient.rs</ulink>
* </refsect2>
* <https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/blob/master/examples/src/bin/rtpfecclient.rs>
*
* See also: #GstRtpUlpFecDec, #GstRtpBin
* Since: 1.14
......
......@@ -2030,7 +2030,9 @@ gst_rtp_bin_class_init (GstRtpBinClass * klass)
* @rtpbin: the object which received the signal
* @id: the session id
*
* Request the internal RTPStorage object as #GObject in session @id.
* Request the internal RTPStorage object as #GObject in session @id. This
* is the internal storage used by the RTPStorage element, which is used to
* keep a backlog of received RTP packets for the session @id.
*
* Since: 1.14
*/
......@@ -2045,7 +2047,8 @@ gst_rtp_bin_class_init (GstRtpBinClass * klass)
* @rtpbin: the object which received the signal
* @id: the session id
*
* Request the RTPStorage element as #GObject in session @id.
* Request the RTPStorage element as #GObject in session @id. This element
* is used to keep a backlog of received RTP packets for the session @id.
*
* Since: 1.16
*/
......
......@@ -2633,10 +2633,10 @@ calculate_jitter (GstRtpJitterBuffer * jitterbuffer, GstClockTime dts,
priv->avg_jitter = (diff + (15 * priv->avg_jitter)) >> 4;
GST_LOG_OBJECT (jitterbuffer,
"dtsdiff %" GST_TIME_FORMAT " rtptime %" GST_TIME_FORMAT
", clock-rate %d, diff %" GST_TIME_FORMAT ", jitter: %" GST_TIME_FORMAT,
GST_TIME_ARGS (dtsdiff), GST_TIME_ARGS (rtpdiffns), priv->clock_rate,
GST_TIME_ARGS (diff), GST_TIME_ARGS (priv->avg_jitter));
"dtsdiff %" GST_STIME_FORMAT " rtptime %" GST_STIME_FORMAT
", clock-rate %d, diff %" GST_STIME_FORMAT ", jitter: %" GST_TIME_FORMAT,
GST_STIME_ARGS (dtsdiff), GST_STIME_ARGS (rtpdiffns), priv->clock_rate,
GST_STIME_ARGS (diff), GST_TIME_ARGS (priv->avg_jitter));
return;
......
......@@ -25,8 +25,8 @@
* @title: rtprtxqueue
*
* rtprtxqueue maintains a queue of transmitted RTP packets, up to a
* configurable limit (see #GstRTPRtxQueue::max-size-time,
* #GstRTPRtxQueue::max-size-packets), and retransmits them upon request
* configurable limit (see #GstRTPRtxQueue:max-size-time,
* #GstRTPRtxQueue:max-size-packets), and retransmits them upon request
* from the downstream rtpsession (GstRTPRetransmissionRequest event).
*
* This element is similar to rtprtxsend, but it has differences:
......
......@@ -40,7 +40,7 @@
* * Support for multiple sender SSRC.
*
* The rtpsession will not demux packets based on SSRC or payload type, nor will
* it correct for packet reordering and jitter. Use #GstRtpsSrcDemux,
* it correct for packet reordering and jitter. Use #GstRtpSsrcDemux,
* #GstRtpPtDemux and GstRtpJitterBuffer in addition to #GstRtpSession to
* perform these tasks. It is usually a good idea to use #GstRtpBin, which
* combines all these features in one element.
......
......@@ -73,7 +73,7 @@
*
* The message's structure contains three fields:
*
* #GstRTSPSrcTimeoutCause `cause`: the cause of the timeout.
* GstRTSPSrcTimeoutCause `cause`: the cause of the timeout.
*
* #gint `stream-number`: an internal identifier of the stream that timed out.
*
......@@ -5468,20 +5468,11 @@ gst_rtspsrc_loop_interleaved (GstRTSPSrc * src)
/* get the next timeout interval */
gst_rtsp_connection_next_timeout (src->conninfo.connection, &tv_timeout);
/* see if the timeout period expired */
if ((tv_timeout.tv_sec | tv_timeout.tv_usec) == 0) {
GST_DEBUG_OBJECT (src, "timout, sending keep-alive");
/* send keep-alive, only act on interrupt, a warning will be posted for
* other errors. */
if ((res = gst_rtspsrc_send_keep_alive (src)) == GST_RTSP_EINTR)
goto interrupt;
/* get new timeout */
gst_rtsp_connection_next_timeout (src->conninfo.connection, &tv_timeout);
}
GST_DEBUG_OBJECT (src, "doing receive with timeout %ld seconds, %ld usec",
tv_timeout.tv_sec, tv_timeout.tv_usec);
gst_rtsp_message_unset (&message);
/* protect the connection with the connection lock so that we can see when
* we are finished doing server communication */
res =
......
......@@ -25,7 +25,7 @@
* framerate. The two incoming buffers are blended together using an effect
* specific alpha mask.
*
* The #GstSmpte:depth property defines the presision in bits of the mask. A
* The #GstSMPTE:depth property defines the presision in bits of the mask. A
* higher presision will create a mask with smoother gradients in order to avoid
* banding.
*
......
......@@ -25,12 +25,12 @@
* using an effect specific SMPTE mask in the I420 input case. In the AYUV case,
* the alpha channel is modified using the effect specific SMPTE mask.
*
* The #GstSmpteAlpha:position property is a controllabe double between 0.0 and
* The #GstSMPTEAlpha:position property is a controllabe double between 0.0 and
* 1.0 that specifies the position in the transition. 0.0 is the start of the
* transition with the alpha channel to complete opaque where 1.0 has the alpha
* channel set to completely transparent.
*
* The #GstSmpteAlpha:depth property defines the precision in bits of the mask.
* The #GstSMPTEAlpha:depth property defines the precision in bits of the mask.
* A higher presision will create a mask with smoother gradients in order to
* avoid banding.
*
......
......@@ -36,22 +36,20 @@
* * #GstClockTime `duration`: the duration of the buffer.
* * #GstClockTime `endtime`: the end time of the buffer that triggered the message as stream time (this
* is deprecated, as it can be calculated from stream-time + duration)
* * #GstValueList of #gfloat `magnitude`: the level for each frequency band in dB.
* * A #GST_TYPE_LIST value of #gfloat `magnitude`: the level for each frequency band in dB.
* All values below the value of the
* #GstSpectrum:threshold property will be set to the threshold. Only present
* if the #GstSpectrum:message-magnitude property is %TRUE.
* * #GstValueList of #gfloat `phase`: The phase for each frequency band. The value is between -pi and pi. Only
* * A #GST_TYPE_LIST of #gfloat `phase`: The phase for each frequency band. The value is between -pi and pi. Only
* present if the #GstSpectrum:message-phase property is %TRUE.
*
* If #GstSpectrum:multi-channel property is set to true. magnitude and phase
* fields will be each a nested #GstValueArray. The first dimension are the
* fields will be each a nested #GST_TYPE_ARRAY value. The first dimension are the
* channels and the second dimension are the values.
*
* ## Example application
*
* <informalexample><programlisting language="C">
* <xi:include xmlns:xi="http://www.w3.org/2003/XInclude" parse="text" href="../../../../tests/examples/spectrum/spectrum-example.c" />
* </programlisting></informalexample>
* {{ tests/examples/spectrum/spectrum-example.c }}
*
*/
......
......@@ -30,7 +30,7 @@
* Videomixer will do colorspace conversion.
*
* Individual parameters for each input stream can be configured on the
* #GstVideoMixer2Pad.
* videomixer pads.
*
* ## Sample pipelines
* |[
......
......@@ -41,4 +41,5 @@ if have_osxaudio
install : true,
install_dir : plugins_install_dir)
pkgconfig.generate(gstosxaudio, install_dir : plugins_pkgconfig_install_dir)
plugins += [gstosxaudio]
endif
......@@ -24,4 +24,5 @@ if have_objc and osxvideo_opengl_dep.found() and osxvideo_cocoa_dep.found()
install : true,
install_dir : plugins_install_dir)
pkgconfig.generate(gstosxvideo, install_dir : plugins_pkgconfig_install_dir)
plugins += [gstosxvideo]
endif
......@@ -1231,7 +1231,7 @@ failed:
/*
* Get the list of supported capture formats, a list of
* <code>struct v4l2_fmtdesc</code>.
* `struct v4l2_fmtdesc`.
*/
static GSList *
gst_v4l2_object_get_format_list (GstV4l2Object * v4l2object)
......
......@@ -24,4 +24,5 @@ if have_waveform
install : true,
install_dir : plugins_install_dir)
pkgconfig.generate(gstwaveformsink, install_dir : plugins_pkgconfig_install_dir)
plugins += [gstwaveformsink]
endif