Commit da2d0554 authored by Thibault Saunier's avatar Thibault Saunier

docs: Port all docstrings to gtk-doc markdown

parent ef118bd5
......@@ -18,19 +18,19 @@
*/
/**
* SECTION:element-aasink
* @title: aasink
* @see_also: #GstCACASink
*
* Displays video as b/w ascii art.
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* gst-launch-1.0 filesrc location=test.avi ! decodebin ! videoconvert ! aasink
* ]| This pipeline renders a video to ascii art into a separate window.
* |[
* gst-launch-1.0 filesrc location=test.avi ! decodebin ! videoconvert ! aasink driver=curses
* ]| This pipeline renders a video to ascii art into the current terminal.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
......
......@@ -19,12 +19,13 @@
/**
* SECTION:element-cairooverlay
* @title: cairooverlay
*
* cairooverlay renders an overlay using a application provided render function.
*
* The full example can be found in tests/examples/cairo/cairo_overlay.c
* <refsect2>
* <title>Example code</title>
*
* ## Example code
* |[
*
* #include &lt;gst/gst.h&gt;
......@@ -37,7 +38,7 @@
* int width;
* int height;
* } CairoOverlayState;
*
*
* ...
*
* static void
......@@ -50,7 +51,7 @@
* }
*
* static void
* draw_overlay (GstElement * overlay, cairo_t * cr, guint64 timestamp,
* draw_overlay (GstElement * overlay, cairo_t * cr, guint64 timestamp,
* guint64 duration, gpointer user_data)
* {
* CairoOverlayState *s = (CairoOverlayState *)user_data;
......@@ -66,7 +67,7 @@
* cairo_move_to (cr, 0, 0);
* cairo_curve_to (cr, 0,-30, -50,-30, -50,0);
* cairo_curve_to (cr, -50,30, 0,35, 0,60 );
* cairo_curve_to (cr, 0,35, 50,30, 50,0 ); *
 * cairo_curve_to (cr, 0,35, 50,30, 50,0 );
* cairo_curve_to (cr, 50,-30, 0,-30, 0,0 );
* cairo_set_source_rgba (cr, 0.9, 0.0, 0.1, 0.7);
* cairo_fill (cr);
......@@ -78,12 +79,12 @@
*
* g_signal_connect (cairo_overlay, &quot;draw&quot;, G_CALLBACK (draw_overlay),
* overlay_state);
* g_signal_connect (cairo_overlay, &quot;caps-changed&quot;,
* g_signal_connect (cairo_overlay, &quot;caps-changed&quot;,
* G_CALLBACK (prepare_overlay), overlay_state);
* ...
*
* ]|
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
......@@ -538,7 +539,7 @@ gst_cairo_overlay_class_init (GstCairoOverlayClass * klass)
* @cr: Cairo context to draw to.
* @timestamp: Timestamp (see #GstClockTime) of the current buffer.
* @duration: Duration (see #GstClockTime) of the current buffer.
*
*
* This signal is emitted when the overlay should be drawn.
*/
gst_cairo_overlay_signals[SIGNAL_DRAW] =
......@@ -555,7 +556,7 @@ gst_cairo_overlay_class_init (GstCairoOverlayClass * klass)
* GstCairoOverlay::caps-changed:
* @overlay: Overlay element emitting the signal.
* @caps: The #GstCaps of the element.
*
*
* This signal is emitted when the caps of the element has changed.
*/
gst_cairo_overlay_signals[SIGNAL_CAPS_CHANGED] =
......
......@@ -20,20 +20,20 @@
/**
* SECTION:element-dvdec
* @title: dvdec
*
* dvdec decodes DV video into raw video. The element expects a full DV frame
* as input, which is 120000 bytes for NTSC and 144000 for PAL video.
*
* This element can perform simple frame dropping with the #GstDVDec:drop-factor
* property. Setting this property to a value N > 1 will only decode every
* property. Setting this property to a value N > 1 will only decode every
* Nth frame.
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* gst-launch-1.0 filesrc location=test.dv ! dvdemux name=demux ! dvdec ! xvimagesink
* ]| This pipeline decodes and renders the raw DV stream to a videosink.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
......
......@@ -31,6 +31,7 @@
/**
* SECTION:element-dvdemux
* @title: dvdemux
*
* dvdemux splits raw DV into its audio and video components. The audio will be
* decoded raw samples and the video will be encoded DV video.
......@@ -38,12 +39,11 @@
* This element can operate in both push and pull mode depending on the
* capabilities of the upstream peer.
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* gst-launch-1.0 filesrc location=test.dv ! dvdemux name=demux ! queue ! audioconvert ! alsasink demux. ! queue ! dvdec ! xvimagesink
* ]| This pipeline decodes and renders the raw DV stream to an audio and a videosink.
* </refsect2>
*
*/
/* DV output has two modes, normal and wide. The resolution is the same in both
......
......@@ -21,21 +21,21 @@
/**
* SECTION:element-flacdec
* @title: flacdec
* @see_also: #GstFlacEnc
*
* flacdec decodes FLAC streams.
* <ulink url="http://flac.sourceforge.net/">FLAC</ulink>
* is a Free Lossless Audio Codec.
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* gst-launch-1.0 filesrc location=media/small/dark.441-16-s.flac ! flacparse ! flacdec ! audioconvert ! audioresample ! autoaudiosink
* ]|
* |[
* gst-launch-1.0 souphttpsrc location=http://gstreamer.freedesktop.org/media/small/dark.441-16-s.flac ! flacparse ! flacdec ! audioconvert ! audioresample ! queue min-threshold-buffers=10 ! autoaudiosink
* ]|
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
......
......@@ -18,6 +18,7 @@
*/
/**
* SECTION:element-flacenc
* @title: flacenc
* @see_also: #GstFlacDec
*
* flacenc encodes FLAC streams.
......@@ -25,8 +26,7 @@
* is a Free Lossless Audio Codec. FLAC audio can directly be written into
* a file, or embedded into containers such as oggmux or matroskamux.
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* gst-launch-1.0 audiotestsrc num-buffers=100 ! flacenc ! filesink location=beep.flac
* ]| Encode a short sine wave into FLAC
......@@ -36,7 +36,7 @@
* |[
* gst-launch-1.0 cdparanoiasrc track=5 ! queue ! audioconvert ! flacenc ! filesink location=track5.flac
* ]| Rip track 5 of an audio CD and encode it losslessly to a FLAC file
* </refsect2>
*
*/
/* TODO: - We currently don't handle discontinuities in the stream in a useful
......
......@@ -23,6 +23,7 @@
/**
* SECTION:element-flactag
* @title: flactag
* @see_also: #flacenc, #flacdec, #GstTagSetter
*
* The flactag element can change the tag contained within a raw
......@@ -34,14 +35,13 @@
* automatically (and merged according to the merge mode set via the tag
* setter interface).
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v filesrc location=foo.flac ! flactag ! filesink location=bar.flac
* ]| This element is not useful with gst-launch, because it does not support
* setting the tags on a #GstTagSetter interface. Conceptually, the element
* will usually be used in this order though.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
......
......@@ -19,6 +19,7 @@
/**
* SECTION:element-gdkpixbufoverlay
* @title: gdkpixbufoverlay
*
* The gdkpixbufoverlay element overlays an image loaded from file onto
* a video stream.
......@@ -32,14 +33,13 @@
*
* Negative offsets are also not yet supported.
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* gst-launch-1.0 -v videotestsrc ! gdkpixbufoverlay location=image.png ! autovideosink
* ]|
* Overlays the image in image.png onto the test video picture produced by
* videotestsrc.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
......
......@@ -19,6 +19,7 @@
/**
* SECTION:element-gdkpixbufsink
* @title: gdkpixbufsink
*
* This sink element takes RGB or RGBA images as input and wraps them into
* #GdkPixbuf objects, for easy saving to file via the
......@@ -27,23 +28,18 @@
*
* There are two ways to use this element and obtain the #GdkPixbuf objects
* created:
* <itemizedlist>
* <listitem>
* Watching for element messages named <classname>&quot;preroll-pixbuf&quot;
* </classname> or <classname>&quot;pixbuf&quot;</classname> on the bus, which
*
* * Watching for element messages named `preroll-pixbuf` or `pixbuf` on the bus, which
* will be posted whenever an image would usually be rendered. See below for
* more details on these messages and how to extract the pixbuf object
* contained in them.
* </listitem>
* <listitem>
* Retrieving the current pixbuf via the #GstGdkPixbufSink:last-pixbuf property
*
* * Retrieving the current pixbuf via the #GstGdkPixbufSink:last-pixbuf property
* when needed. This is the easiest way to get at pixbufs for snapshotting
* purposes - just wait until the pipeline is prerolled (ASYNC_DONE message
* on the bus), then read the property. If you use this method, you may want
* to disable message posting by setting the #GstGdkPixbufSink:post-messages
* property to %FALSE. This avoids unnecessary memory overhead.
* </listitem>
* </itemizedlist>
*
* The primary purpose of this element is to abstract away the #GstBuffer to
* #GdkPixbuf conversion. Other than that it's very similar to the fakesink
......@@ -54,20 +50,17 @@
* ximagesink, xvimagesink or some other suitable video sink in connection
* with the #GstXOverlay interface instead if you want to do video playback.
*
* <refsect2>
* <title>Message details</title>
* ## Message details
*
* As mentioned above, this element will by default post element messages
* containing structures named <classname>&quot;preroll-pixbuf&quot;
* </classname> or <classname>&quot;pixbuf&quot;</classname> on the bus (this
* containing structures named `preroll-pixbuf`
 * or `pixbuf` on the bus (this
* can be disabled by setting the #GstGdkPixbufSink:post-messages property
* to %FALSE though). The element message structure has the following fields:
* <itemizedlist>
* <listitem>
* <classname>&quot;pixbuf&quot;</classname>: the #GdkPixbuf object
* </listitem>
* <listitem>
* <classname>&quot;pixel-aspect-ratio&quot;</classname>: the pixel aspect
* ratio (PAR) of the input image (this field contains a #GstFraction); the
*
* * `pixbuf`: the #GdkPixbuf object
* * `pixel-aspect-ratio`: the pixel aspect ratio (PAR) of the input image
* (this field contains a #GstFraction); the
* PAR is usually 1:1 for images, but is often something non-1:1 in the case
* of video input. In this case the image may be distorted and you may need
* to rescale it accordingly before saving it to file or displaying it. This
......@@ -76,20 +69,15 @@
* according to the size of the output window, in which case it is much more
* efficient to only scale once rather than twice). You can put a videoscale
* element and a capsfilter element with
* <literal>video/x-raw-rgb,pixel-aspect-ratio=(fraction)1/1</literal> caps
* `video/x-raw-rgb,pixel-aspect-ratio=(fraction)1/1` caps
* in front of this element to make sure the pixbufs always have a 1:1 PAR.
* </listitem>
* </itemizedlist>
* </refsect2>
*
* <refsect2>
* <title>Example pipeline</title>
* ## Example pipeline
* |[
* gst-launch-1.0 -m -v videotestsrc num-buffers=1 ! gdkpixbufsink
* ]| Process one single test image as pixbuf (note that the output you see will
* be slightly misleading. The message structure does contain a valid pixbuf
* object even if the structure string says &apos;(NULL)&apos;).
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
......
......@@ -494,7 +494,7 @@ gst_jack_audio_connection_remove_client (GstJackAudioConnection * conn,
* Get the jack client connection for @id and @server. Connections to the same
* @id and @server will receive the same physical Jack client connection and
* will therefore be scheduled in the same process callback.
*
*
* Returns: a #GstJackAudioClient.
*/
GstJackAudioClient *
......
......@@ -21,36 +21,36 @@
/**
* SECTION:element-jackaudiosink
* @title: jackaudiosink
* @see_also: #GstAudioBaseSink, #GstAudioRingBuffer
*
* A Sink that outputs data to Jack ports.
*
* It will create N Jack ports named out_&lt;name&gt;_&lt;num&gt; where
*
* It will create N Jack ports named out_&lt;name&gt;_&lt;num&gt; where
* &lt;name&gt; is the element name and &lt;num&gt; is starting from 1.
* Each port corresponds to a gstreamer channel.
*
*
* The samplerate as exposed on the caps is always the same as the samplerate of
* the jack server.
*
*
* When the #GstJackAudioSink:connect property is set to auto, this element
* will try to connect each output port to a random physical jack input pin. In
* this mode, the sink will expose the number of physical channels on its pad
* caps.
*
*
* When the #GstJackAudioSink:connect property is set to none, the element will
* accept any number of input channels and will create (but not connect) an
* output port for each channel.
*
*
* The element will generate an error when the Jack server is shut down when it
* was PAUSED or PLAYING. This element does not support dynamic rate and buffer
* size changes at runtime.
*
* <refsect2>
* <title>Example launch line</title>
*
* ## Example launch line
* |[
* gst-launch-1.0 audiotestsrc ! jackaudiosink
* ]| Play a sine wave to using jack.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
......
......@@ -44,7 +44,7 @@ typedef struct _GstJackAudioSinkClass GstJackAudioSinkClass;
/**
* GstJackAudioSink:
*
*
* Opaque #GstJackAudioSink.
*/
struct _GstJackAudioSink {
......
......@@ -42,34 +42,34 @@
/**
* SECTION:element-jackaudiosrc
* @title: jackaudiosrc
* @see_also: #GstAudioBaseSrc, #GstAudioRingBuffer
*
* A Src that inputs data from Jack ports.
*
* It will create N Jack ports named in_&lt;name&gt;_&lt;num&gt; where
*
* It will create N Jack ports named in_&lt;name&gt;_&lt;num&gt; where
* &lt;name&gt; is the element name and &lt;num&gt; is starting from 1.
* Each port corresponds to a gstreamer channel.
*
*
* The samplerate as exposed on the caps is always the same as the samplerate of
* the jack server.
*
*
* When the #GstJackAudioSrc:connect property is set to auto, this element
* will try to connect each input port to a random physical jack output pin.
*
* will try to connect each input port to a random physical jack output pin.
*
* When the #GstJackAudioSrc:connect property is set to none, the element will
* accept any number of output channels and will create (but not connect) an
* input port for each channel.
*
*
* The element will generate an error when the Jack server is shut down when it
* was PAUSED or PLAYING. This element does not support dynamic rate and buffer
* size changes at runtime.
*
* <refsect2>
* <title>Example launch line</title>
*
* ## Example launch line
* |[
* gst-launch-1.0 jackaudiosrc connect=0 ! jackaudiosink connect=0
* ]| Get audio input into gstreamer from jack.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
......
......@@ -22,15 +22,15 @@
/**
* SECTION:element-jpegdec
* @title: jpegdec
*
* Decodes jpeg images.
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* gst-launch-1.0 -v filesrc location=mjpeg.avi ! avidemux ! queue ! jpegdec ! videoconvert ! videoscale ! autovideosink
* ]| The above pipeline decode the mjpeg stream and renders it to the screen.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
......
......@@ -20,16 +20,16 @@
*/
/**
* SECTION:element-jpegenc
* @title: jpegenc
*
* Encodes jpeg images.
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* gst-launch-1.0 videotestsrc num-buffers=50 ! video/x-raw, framerate='(fraction)'5/1 ! jpegenc ! avimux ! filesink location=mjpeg.avi
* ]| a pipeline to mux 5 JPEG frames per second into a 10 sec. long motion jpeg
* avi.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
......
......@@ -19,6 +19,7 @@
/**
* SECTION:element-smokedec
* @title: smokedec
*
* Decodes images in smoke format.
*/
......
......@@ -18,6 +18,7 @@
*/
/**
* SECTION:element-smokeenc
* @title: smokeenc
*
* Encodes images in smoke format.
*/
......
......@@ -18,12 +18,12 @@
*/
/**
* SECTION:element-cacasink
* @title: cacasink
* @see_also: #GstAASink
*
* Displays video as color ascii art.
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* CACA_GEOMETRY=160x60 CACA_FONT=5x7 gst-launch-1.0 filesrc location=test.avi ! decodebin ! videoconvert ! cacasink
* ]| This pipeline renders a video to ascii art into a separate window using a
......@@ -31,7 +31,7 @@
* |[
* CACA_DRIVER=ncurses gst-launch-1.0 filesrc location=test.avi ! decodebin ! videoconvert ! cacasink
* ]| This pipeline renders a video to ascii art into the current terminal.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
......
......@@ -19,6 +19,7 @@
*/
/**
* SECTION:element-pngdec
* @title: pngdec
*
* Decodes png images. If there is no framerate set on sink caps, it sends EOS
* after the first picture.
......
......@@ -20,6 +20,7 @@
*/
/**
* SECTION:element-pngenc
* @title: pngenc
*
* Encodes png images.
*/
......
......@@ -23,13 +23,13 @@
/**
* SECTION:element-pulsesink
* @title: pulsesink
* @see_also: pulsesrc
*
* This element outputs audio to a
* <ulink href="http://www.pulseaudio.org">PulseAudio sound server</ulink>.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v filesrc location=sine.ogg ! oggdemux ! vorbisdec ! audioconvert ! audioresample ! pulsesink
* ]| Play an Ogg/Vorbis file.
......@@ -40,7 +40,7 @@
* gst-launch-1.0 -v audiotestsrc ! pulsesink stream-properties="props,media.title=test"
* ]| Play a sine wave and set a stream property. The property can be checked
* with "pactl list".
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
......
......@@ -21,17 +21,17 @@
/**
* SECTION:element-pulsesrc
* @title: pulsesrc
* @see_also: pulsesink
*
* This element captures audio from a
* <ulink href="http://www.pulseaudio.org">PulseAudio sound server</ulink>.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v pulsesrc ! audioconvert ! vorbisenc ! oggmux ! filesink location=alsasrc.ogg
* ]| Record from a sound card using pulseaudio and encode to Ogg/Vorbis.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
......
......@@ -21,16 +21,16 @@
*/
/**
* SECTION:element-dv1394src
* @title: dv1394src
*
* Read DV (digital video) data from firewire port.
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* gst-launch-1.0 dv1394src ! queue ! dvdemux name=d ! queue ! dvdec ! xvimagesink d. ! queue ! alsasink
* ]| This pipeline captures from the firewire port and displays it (might need
* format converters for audio/video).
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
......
......@@ -18,18 +18,18 @@
*/
/**
* SECTION:element-hdv1394src
* @title: hdv1394src
*
* Read MPEG-TS data from firewire port.
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* gst-launch-1.0 hdv1394src ! queue ! decodebin name=d ! queue ! xvimagesink d. ! queue ! alsasink
* ]| captures from the firewire port and plays the streams.
* |[
* gst-launch-1.0 hdv1394src ! queue ! filesink location=mydump.ts
* ]| capture to a disk file
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
......
......@@ -21,18 +21,18 @@
/**
* SECTION:element-shout2send
* @title: shout2send
*
* shout2send pushes a media stream to an Icecast server
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* gst-launch-1.0 uridecodebin uri=file:///path/to/audiofile ! audioconvert ! vorbisenc ! oggmux ! shout2send mount=/stream.ogg port=8000 username=source password=somepassword ip=server_IP_address_or_hostname
* ]| This pipeline demuxes, decodes, re-encodes and re-muxes an audio
* media file into oggvorbis and sends the resulting stream to an Icecast
* server. Properties mount, port, username and password are all server-config
* dependent.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
......
......@@ -18,20 +18,20 @@
*/
/**
* SECTION:element-gstsouphttpclientsink
* @title: gstsouphttpclientsink
*
* The souphttpclientsink element sends pipeline data to an HTTP server
* using HTTP PUT commands.
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* gst-launch-1.0 -v videotestsrc num-buffers=300 ! theoraenc ! oggmux !
* souphttpclientsink location=http://server/filename.ogv
* ]|
*
*
* This example encodes 10 seconds of video and sends it to the HTTP
* server "server" using HTTP PUT commands.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
......