Commit d2469972 authored by Thibault Saunier, committed by Thibault Saunier

Pass our C files to gst-indent

parent 42285ae1
Pipeline #33072 passed in 1 minute and 50 seconds
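gst-indent is the formatting helper carried in the GStreamer repositories: a thin wrapper around GNU indent that applies the project's C coding style. The whitespace-only changes below correspond roughly to running it over each C file. A minimal sketch of an equivalent invocation, assuming a flag set close to what gst-indent passes to GNU indent (an approximation, not copied from the script; tutorial.c is a placeholder file name):

  indent --indent-level2 --braces-on-if-line --braces-after-struct-decl-line \
      --case-indentation2 --line-length80 --no-tabs --cuddle-else \
      --dont-line-up-parentheses --continuation-indentation4 --honour-newlines \
      --leave-preprocessor-space tutorial.c

The visible effects of that style are what the hunks below show: two-space indentation, a space before the opening parenthesis of calls, pointer parameters written as "Type * name", return types on their own line in function definitions, and long calls wrapped at 80 columns.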
@@ -3,14 +3,12 @@
static GMainLoop *loop;
static gboolean
my_bus_callback (GstBus *bus,
GstMessage *message,
gpointer data)
my_bus_callback (GstBus * bus, GstMessage * message, gpointer data)
{
g_print ("Got %s message\n", GST_MESSAGE_TYPE_NAME (message));
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_ERROR: {
case GST_MESSAGE_ERROR:{
GError *err;
gchar *debug;
@@ -39,8 +37,7 @@ my_bus_callback (GstBus *bus,
}
gint
main (gint argc,
gchar *argv[])
main (gint argc, gchar * argv[])
{
GstElement *pipeline;
GstBus *bus;
......
#include <gst/gst.h>
static void
link_to_multiplexer (GstPad *tolink_pad,
GstElement *mux)
link_to_multiplexer (GstPad * tolink_pad, GstElement * mux)
{
GstPad *pad;
gchar *srcname, *sinkname;
@@ -19,9 +18,9 @@ link_to_multiplexer (GstPad *tolink_pad,
}
static void
some_function (GstElement *tee)
some_function (GstElement * tee)
{
GstPad * pad;
GstPad *pad;
gchar *name;
pad = gst_element_get_request_pad (tee, "src%d");
......
@@ -6,26 +6,34 @@
/*
* Java Bindings
*/
static jstring gst_native_get_gstreamer_info (JNIEnv* env, jobject thiz) {
char *version_utf8 = gst_version_string();
jstring *version_jstring = (*env)->NewStringUTF(env, version_utf8);
static jstring
gst_native_get_gstreamer_info (JNIEnv * env, jobject thiz)
{
char *version_utf8 = gst_version_string ();
jstring *version_jstring = (*env)->NewStringUTF (env, version_utf8);
g_free (version_utf8);
return version_jstring;
}
static JNINativeMethod native_methods[] = {
{ "nativeGetGStreamerInfo", "()Ljava/lang/String;", (void *) gst_native_get_gstreamer_info}
{"nativeGetGStreamerInfo", "()Ljava/lang/String;",
(void *) gst_native_get_gstreamer_info}
};
jint JNI_OnLoad(JavaVM *vm, void *reserved) {
jint
JNI_OnLoad (JavaVM * vm, void *reserved)
{
JNIEnv *env = NULL;
if ((*vm)->GetEnv(vm, (void**) &env, JNI_VERSION_1_4) != JNI_OK) {
__android_log_print (ANDROID_LOG_ERROR, "tutorial-1", "Could not retrieve JNIEnv");
if ((*vm)->GetEnv (vm, (void **) &env, JNI_VERSION_1_4) != JNI_OK) {
__android_log_print (ANDROID_LOG_ERROR, "tutorial-1",
"Could not retrieve JNIEnv");
return 0;
}
jclass klass = (*env)->FindClass (env, "org/freedesktop/gstreamer/tutorials/tutorial_1/Tutorial1");
(*env)->RegisterNatives (env, klass, native_methods, G_N_ELEMENTS(native_methods));
jclass klass = (*env)->FindClass (env,
"org/freedesktop/gstreamer/tutorials/tutorial_1/Tutorial1");
(*env)->RegisterNatives (env, klass, native_methods,
G_N_ELEMENTS (native_methods));
return JNI_VERSION_1_4;
}
#include <gst/gst.h>
int main(int argc, char *argv[]) {
int
main (int argc, char *argv[])
{
GstElement *pipeline;
GstBus *bus;
GstMessage *msg;
@@ -9,14 +11,19 @@ int main(int argc, char *argv[]) {
gst_init (&argc, &argv);
/* Build the pipeline */
pipeline = gst_parse_launch ("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
pipeline =
gst_parse_launch
("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm",
NULL);
/* Start playing */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
msg =
gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Free resources */
if (msg != NULL)
......
#include <gst/gst.h>
#include <string.h>
typedef struct _CustomData {
typedef struct _CustomData
{
gboolean is_live;
GstElement *pipeline;
GMainLoop *loop;
} CustomData;
static void cb_message (GstBus *bus, GstMessage *msg, CustomData *data) {
static void
cb_message (GstBus * bus, GstMessage * msg, CustomData * data)
{
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR: {
case GST_MESSAGE_ERROR:{
GError *err;
gchar *debug;
@@ -28,11 +31,12 @@ static void cb_message (GstBus *bus, GstMessage *msg, CustomData *data) {
gst_element_set_state (data->pipeline, GST_STATE_READY);
g_main_loop_quit (data->loop);
break;
case GST_MESSAGE_BUFFERING: {
case GST_MESSAGE_BUFFERING:{
gint percent = 0;
/* If the stream is live, we do not care about buffering. */
if (data->is_live) break;
if (data->is_live)
break;
gst_message_parse_buffering (msg, &percent);
g_print ("Buffering (%3d%%)\r", percent);
@@ -51,10 +55,12 @@ static void cb_message (GstBus *bus, GstMessage *msg, CustomData *data) {
default:
/* Unhandled message */
break;
}
}
}
int main(int argc, char *argv[]) {
int
main (int argc, char *argv[])
{
GstElement *pipeline;
GstBus *bus;
GstStateChangeReturn ret;
@@ -68,7 +74,10 @@ int main(int argc, char *argv[]) {
memset (&data, 0, sizeof (data));
/* Build the pipeline */
pipeline = gst_parse_launch ("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
pipeline =
gst_parse_launch
("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm",
NULL);
bus = gst_element_get_bus (pipeline);
/* Start playing */
......
@@ -2,17 +2,20 @@
#include <stdio.h>
#include <gst/gst.h>
typedef struct _CustomData {
typedef struct _CustomData
{
GstElement *pipeline;
GstElement *video_sink;
GMainLoop *loop;
gboolean playing; /* Playing or Paused */
gdouble rate; /* Current playback rate (can be negative) */
gboolean playing; /* Playing or Paused */
gdouble rate; /* Current playback rate (can be negative) */
} CustomData;
/* Send seek event to change rate */
static void send_seek_event (CustomData *data) {
static void
send_seek_event (CustomData * data)
{
gint64 position;
GstEvent *seek_event;
@@ -24,11 +27,15 @@ static void send_seek_event (CustomData *data) {
/* Create the seek event */
if (data->rate > 0) {
seek_event = gst_event_new_seek (data->rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_SET, -1);
seek_event =
gst_event_new_seek (data->rate, GST_FORMAT_TIME,
GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, GST_SEEK_TYPE_SET,
position, GST_SEEK_TYPE_SET, -1);
} else {
seek_event = gst_event_new_seek (data->rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, position);
seek_event =
gst_event_new_seek (data->rate, GST_FORMAT_TIME,
GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, GST_SEEK_TYPE_SET, 0,
GST_SEEK_TYPE_SET, position);
}
if (data->video_sink == NULL) {
@@ -43,46 +50,51 @@ static void send_seek_event (CustomData *data) {
}
/* Process keyboard input */
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
static gboolean
handle_keyboard (GIOChannel * source, GIOCondition cond, CustomData * data)
{
gchar *str = NULL;
if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) != G_IO_STATUS_NORMAL) {
if (g_io_channel_read_line (source, &str, NULL, NULL,
NULL) != G_IO_STATUS_NORMAL) {
return TRUE;
}
switch (g_ascii_tolower (str[0])) {
case 'p':
data->playing = !data->playing;
gst_element_set_state (data->pipeline, data->playing ? GST_STATE_PLAYING : GST_STATE_PAUSED);
g_print ("Setting state to %s\n", data->playing ? "PLAYING" : "PAUSE");
break;
case 's':
if (g_ascii_isupper (str[0])) {
data->rate *= 2.0;
} else {
data->rate /= 2.0;
}
send_seek_event (data);
break;
case 'd':
data->rate *= -1.0;
send_seek_event (data);
break;
case 'n':
if (data->video_sink == NULL) {
/* If we have not done so, obtain the sink through which we will send the step events */
g_object_get (data->pipeline, "video-sink", &data->video_sink, NULL);
}
gst_element_send_event (data->video_sink,
gst_event_new_step (GST_FORMAT_BUFFERS, 1, ABS (data->rate), TRUE, FALSE));
g_print ("Stepping one frame\n");
break;
case 'q':
g_main_loop_quit (data->loop);
break;
default:
break;
case 'p':
data->playing = !data->playing;
gst_element_set_state (data->pipeline,
data->playing ? GST_STATE_PLAYING : GST_STATE_PAUSED);
g_print ("Setting state to %s\n", data->playing ? "PLAYING" : "PAUSE");
break;
case 's':
if (g_ascii_isupper (str[0])) {
data->rate *= 2.0;
} else {
data->rate /= 2.0;
}
send_seek_event (data);
break;
case 'd':
data->rate *= -1.0;
send_seek_event (data);
break;
case 'n':
if (data->video_sink == NULL) {
/* If we have not done so, obtain the sink through which we will send the step events */
g_object_get (data->pipeline, "video-sink", &data->video_sink, NULL);
}
gst_element_send_event (data->video_sink,
gst_event_new_step (GST_FORMAT_BUFFERS, 1, ABS (data->rate), TRUE,
FALSE));
g_print ("Stepping one frame\n");
break;
case 'q':
g_main_loop_quit (data->loop);
break;
default:
break;
}
g_free (str);
@@ -90,7 +102,9 @@ static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomDa
return TRUE;
}
int main(int argc, char *argv[]) {
int
main (int argc, char *argv[])
{
CustomData data;
GstStateChangeReturn ret;
GIOChannel *io_stdin;
@@ -102,16 +116,18 @@ int main(int argc, char *argv[]) {
memset (&data, 0, sizeof (data));
/* Print usage map */
g_print (
"USAGE: Choose one of the following options, then press enter:\n"
" 'P' to toggle between PAUSE and PLAY\n"
" 'S' to increase playback speed, 's' to decrease playback speed\n"
" 'D' to toggle playback direction\n"
" 'N' to move to next frame (in the current direction, better in PAUSE)\n"
" 'Q' to quit\n");
g_print ("USAGE: Choose one of the following options, then press enter:\n"
" 'P' to toggle between PAUSE and PLAY\n"
" 'S' to increase playback speed, 's' to decrease playback speed\n"
" 'D' to toggle playback direction\n"
" 'N' to move to next frame (in the current direction, better in PAUSE)\n"
" 'Q' to quit\n");
/* Build the pipeline */
data.pipeline = gst_parse_launch ("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
data.pipeline =
gst_parse_launch
("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm",
NULL);
/* Add a keyboard watch so we get notified of keystrokes */
#ifdef G_OS_WIN32
@@ -119,7 +135,7 @@ int main(int argc, char *argv[]) {
#else
io_stdin = g_io_channel_unix_new (fileno (stdin));
#endif
g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);
g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc) handle_keyboard, &data);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
......
#include <clutter-gst/clutter-gst.h>
/* Setup the video texture once its size is known */
void size_change (ClutterActor *texture, gint width, gint height, gpointer user_data) {
void
size_change (ClutterActor * texture, gint width, gint height,
gpointer user_data)
{
ClutterActor *stage;
gfloat new_x, new_y, new_width, new_height;
gfloat stage_width, stage_height;
@@ -21,7 +24,7 @@ void size_change (ClutterActor *texture, gint width, gint height, gpointer user_
new_x = 0;
new_y = (stage_height - new_height) / 2;
} else {
new_width = (width * stage_height) / height;
new_width = (width * stage_height) / height;
new_height = stage_height;
new_x = (stage_width - new_width) / 2;
@@ -29,13 +32,18 @@ void size_change (ClutterActor *texture, gint width, gint height, gpointer user_
}
clutter_actor_set_position (texture, new_x, new_y);
clutter_actor_set_size (texture, new_width, new_height);
clutter_actor_set_rotation (texture, CLUTTER_Y_AXIS, 0.0, stage_width / 2, 0, 0);
clutter_actor_set_rotation (texture, CLUTTER_Y_AXIS, 0.0, stage_width / 2, 0,
0);
/* Animate it */
animation = clutter_actor_animate (texture, CLUTTER_LINEAR, 10000, "rotation-angle-y", 360.0, NULL);
animation =
clutter_actor_animate (texture, CLUTTER_LINEAR, 10000, "rotation-angle-y",
360.0, NULL);
clutter_animation_set_loop (animation, TRUE);
}
int main(int argc, char *argv[]) {
int
main (int argc, char *argv[])
{
GstElement *pipeline, *sink;
ClutterTimeline *timeline;
ClutterActor *stage, *texture;
@@ -50,14 +58,19 @@ int main(int argc, char *argv[]) {
/* Make a timeline */
timeline = clutter_timeline_new (1000);
g_object_set(timeline, "loop", TRUE, NULL);
g_object_set (timeline, "loop", TRUE, NULL);
/* Create new texture and disable slicing so the video is properly mapped onto it */
texture = CLUTTER_ACTOR (g_object_new (CLUTTER_TYPE_TEXTURE, "disable-slicing", TRUE, NULL));
texture =
CLUTTER_ACTOR (g_object_new (CLUTTER_TYPE_TEXTURE, "disable-slicing",
TRUE, NULL));
g_signal_connect (texture, "size-change", G_CALLBACK (size_change), NULL);
/* Build the GStreamer pipeline */
pipeline = gst_parse_launch ("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
pipeline =
gst_parse_launch
("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm",
NULL);
/* Instantiate the Clutter sink */
sink = gst_element_factory_make ("autocluttersink", NULL);
@@ -70,7 +83,7 @@ int main(int argc, char *argv[]) {
return -1;
}
/* Link GStreamer with Clutter by passing the Clutter texture to the Clutter sink*/
/* Link GStreamer with Clutter by passing the Clutter texture to the Clutter sink */
g_object_set (sink, "texture", texture, NULL);
/* Add the Clutter sink to the pipeline */
@@ -86,7 +99,7 @@ int main(int argc, char *argv[]) {
clutter_group_add (CLUTTER_GROUP (stage), texture);
clutter_actor_show_all (stage);
clutter_main();
clutter_main ();
/* Free resources */
gst_element_set_state (pipeline, GST_STATE_NULL);
......
#include <gst/gst.h>
int main(int argc, char *argv[]) {
int
main (int argc, char *argv[])
{
GstElement *pipeline, *source, *sink;
GstBus *bus;
GstMessage *msg;
@@ -42,7 +44,9 @@ int main(int argc, char *argv[]) {
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
msg =
gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
@@ -52,8 +56,10 @@ int main(int argc, char *argv[]) {
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_printerr ("Error received from element %s: %s\n",
GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n",
debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
@@ -73,4 +79,4 @@ int main(int argc, char *argv[]) {
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
\ No newline at end of file
}
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
typedef struct _CustomData
{
GstElement *pipeline;
GstElement *source;
GstElement *convert;
@@ -9,9 +10,12 @@ typedef struct _CustomData {
} CustomData;
/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);
static void pad_added_handler (GstElement * src, GstPad * pad,
CustomData * data);
int main(int argc, char *argv[]) {
int
main (int argc, char *argv[])
{
CustomData data;
GstBus *bus;
GstMessage *msg;
@@ -36,7 +40,8 @@ int main(int argc, char *argv[]) {
/* Build the pipeline. Note that we are NOT linking the source at this
* point. We will do it later. */
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert , data.sink, NULL);
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert,
data.sink, NULL);
if (!gst_element_link (data.convert, data.sink)) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
@@ -44,10 +49,13 @@ int main(int argc, char *argv[]) {
}
/* Set the URI to play */
g_object_set (data.source, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
g_object_set (data.source, "uri",
"https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm",
NULL);
/* Connect to the pad-added signal */
g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);
g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler),
&data);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
@@ -71,8 +79,10 @@ int main(int argc, char *argv[]) {
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_printerr ("Error received from element %s: %s\n",
GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n",
debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
terminate = TRUE;
@@ -85,9 +95,11 @@ int main(int argc, char *argv[]) {
/* We are only interested in state-changed messages from the pipeline */
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
gst_message_parse_state_changed (msg, &old_state, &new_state,
&pending_state);
g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
gst_element_state_get_name (old_state),
gst_element_state_get_name (new_state));
}
break;
default:
@@ -107,14 +119,17 @@ int main(int argc, char *argv[]) {
}
/* This function will be called by the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
static void
pad_added_handler (GstElement * src, GstPad * new_pad, CustomData * data)
{
GstPad *sink_pad = gst_element_get_static_pad (data->convert, "sink");
GstPadLinkReturn ret;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad),
GST_ELEMENT_NAME (src));
/* If our converter is already linked, we have nothing to do here */
if (gst_pad_is_linked (sink_pad)) {
@@ -127,7 +142,8 @@ static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *dat
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
if (!g_str_has_prefix (new_pad_type, "audio/x-raw")) {
g_print ("It has type '%s' which is not raw audio. Ignoring.\n", new_pad_type);
g_print ("It has type '%s' which is not raw audio. Ignoring.\n",
new_pad_type);
goto exit;
}
......
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
GstElement *playbin; /* Our one and only element */
gboolean playing; /* Are we in the PLAYING state? */
gboolean terminate; /* Should we terminate execution? */
gboolean seek_enabled; /* Is seeking enabled for this media? */
gboolean seek_done; /* Have we performed the seek already? */
gint64 duration; /* How long does this media last, in nanoseconds */
typedef struct _CustomData
{
GstElement *playbin; /* Our one and only element */
gboolean playing; /* Are we in the PLAYING state? */
gboolean terminate; /* Should we terminate execution? */
gboolean seek_enabled; /* Is seeking enabled for this media? */
gboolean seek_done; /* Have we performed the seek already? */
gint64 duration; /* How long does this media last, in nanoseconds */
} CustomData;
/* Forward definition of the message processing function */
static void handle_message (CustomData *data, GstMessage *msg);
static void handle_message (CustomData * data, GstMessage * msg);
int main(int argc, char *argv[]) {
int
main (int argc, char *argv[])
{
CustomData data;
GstBus *bus;
GstMessage *msg;
@@ -37,7 +40,9 @@ int main(int argc, char *argv[]) {
}
/* Set the URI to play */
g_object_set (data.playbin, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
g_object_set (data.playbin, "uri",
"https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm",
NULL);
/* Start playing */
ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
@@ -51,7 +56,8 @@ int main(int argc, char *argv[]) {
bus = gst_element_get_bus (data.playbin);
do {
msg = gst_bus_timed_pop_filtered (bus, 100 * GST_MSECOND,
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_DURATION);
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS |
GST_MESSAGE_DURATION);
/* Parse message */
if (msg != NULL) {
@@ -62,13 +68,15 @@ int main(int argc, char *argv[]) {
gint64 current = -1;
/* Query the current position of the stream */
if (!gst_element_query_position (data.playbin, GST_FORMAT_TIME, &current)) {
if (!gst_element_query_position (data.playbin, GST_FORMAT_TIME,
&current)) {
g_printerr ("Could not query current position.\n");
}
/* If we didn't know it yet, query the stream duration */
if (!GST_CLOCK_TIME_IS_VALID (data.duration)) {
if (!gst_element_query_duration (data.playbin, GST_FORMAT_TIME, &data.duration)) {
if (!gst_element_query_duration (data.playbin, GST_FORMAT_TIME,
&data.duration)) {
g_printerr ("Could not query current duration.\n");
}
}
@@ -95,15 +103,19 @@ int main(int argc, char *argv[]) {
return 0;
}
static void handle_message (CustomData *data, GstMessage *msg) {
static void
handle_message (CustomData * data, GstMessage * msg)
{
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_printerr ("Error received from element %s: %s\n",
GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n",
debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
data->terminate = TRUE;
@@ -116,12 +128,14 @@ static void handle_message (CustomData *data, GstMessage *msg) {
/* The duration has changed, mark the current one as invalid */
data->duration = GST_CLOCK_TIME_NONE;
break;
case GST_MESSAGE_STATE_CHANGED: {
case GST_MESSAGE_STATE_CHANGED:{
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
gst_message_parse_state_changed (msg, &old_state, &new_state,
&pending_state);
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
gst_element_state_get_name (old_state),
gst_element_state_get_name (new_state));
/* Remember whether we are in the PLAYING state or not */
data->playing = (new_state == GST_STATE_PLAYING);
@@ -132,21 +146,23 @@ static void handle_message (CustomData *data, GstMessage *msg) {
gint64 start, end;
query = gst_query_new_seeking (GST_FORMAT_TIME);
if (gst_element_query (data->playbin, query)) {
gst_query_parse_seeking (query, NULL, &data->seek_enabled, &start, &end);
gst_query_parse_seeking (query, NULL, &data->seek_enabled, &start,
&end);
if (data->seek_enabled) {
g_print ("Seeking is ENABLED from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT "\n",
GST_TIME_ARGS (start), GST_TIME_ARGS (end));
g_print ("Seeking is ENABLED from %" GST_TIME_FORMAT " to %"