Commit 909baa23 authored by Thibault Saunier, committed by Thibault Saunier

Pass the code through codespell

parent cbd41106
Pipeline #60138 passed with stages in 58 minutes and 44 seconds
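The commit message does not record the exact command that produced these fixes. As a rough sketch only (assuming the -w/--write-changes flag of current codespell releases and a run from the top of the source tree), the typo pass could be reproduced with:

    # hypothetical invocation, not taken from the commit itself:
    # let codespell rewrite the flagged typos in place across the tree
    codespell --write-changes .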
@@ -818,7 +818,7 @@ dnl GST_*: flags shared by built objects to link against GStreamer
dnl GST_PLUGINS_BASE_CFLAGS: to link internally against the plugins base libs
dnl (compare to other modules) or for i18n
dnl GST_ALL_LDFLAGS: linker flags shared by all
-dnl GST_LIB_LDFLAGS: additional linker flags for all libaries
+dnl GST_LIB_LDFLAGS: additional linker flags for all libraries
dnl GST_LT_LDFLAGS: library versioning of our libraries
dnl GST_PLUGIN_LDFLAGS: flags to be used for all plugins
@@ -933,7 +933,7 @@ gst_alsasrc_get_timestamp (GstAlsaSrc * asrc)
return GST_CLOCK_TIME_NONE;
}
-/* in case an xrun condition has occured we need to handle this */
+/* in case an xrun condition has occurred we need to handle this */
if (snd_pcm_status_get_state (status) != SND_PCM_STATE_RUNNING) {
if (xrun_recovery (asrc, asrc->handle, err) < 0) {
GST_WARNING_OBJECT (asrc, "Could not recover from xrun condition !");
@@ -75,7 +75,7 @@ gst_gl_effects_xray (GstGLEffects * effects)
gst_gl_filter_render_to_target_with_shader (filter, effects->intexture,
effects->midtexture[3], shader);
-/* horizonal convolution */
+/* horizontal convolution */
shader = gst_gl_effects_get_fragment_shader (effects, "sobel_hconv3",
sep_sobel_hconv3_fragment_source_gles2);
gst_gl_shader_use (shader);
@@ -197,7 +197,7 @@ static const gchar *chroma_key_frag =
"void main () {\n"
" vec4 yuva;\n"
/* operations translated from alpha and tested with glvideomixer
-* with one pad's paremeters blend-equation-rgb={subtract,reverse-subtract},
+* with one pad's parameters blend-equation-rgb={subtract,reverse-subtract},
* blend-function-src-rgb=src-color and blend-function-dst-rgb=dst-color */
" vec4 rgba = texture2D (tex, v_texcoord);\n"
" yuva.xyz = rgb_to_yuv (rgba.rgb);\n"
@@ -134,7 +134,7 @@ static const gchar color_balance_frag_templ[] =
"void main () {\n"
" vec3 yuv;\n"
/* operations translated from videobalanceand tested with glvideomixer
-* with one pad's paremeters blend-equation-rgb={subtract,reverse-subtract},
+* with one pad's parameters blend-equation-rgb={subtract,reverse-subtract},
* blend-function-src-rgb=src-color and blend-function-dst-rgb=dst-color */
" float hue_cos = cos (PI * hue);\n"
" float hue_sin = sin (PI * hue);\n"
@@ -380,7 +380,7 @@ gst_gl_filter_reflected_screen_callback (gint width, gint height, guint texture,
glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
-//load identity befor tracing
+//load identity before tracing
glLoadIdentity ();
//camera translation
glTranslatef (0.0f, 0.1f, -1.3f);
@@ -387,7 +387,7 @@ _set_uniform (GQuark field_id, const GValue * value, gpointer user_data)
#endif
} else {
/* FIXME: Add support for unsigned ints, non 4x4 matrices, etc */
-GST_FIXME ("Don't know how to set the \'%s\' paramater. Unknown type",
+GST_FIXME ("Don't know how to set the \'%s\' parameter. Unknown type",
field_name);
return TRUE;
}
@@ -530,7 +530,7 @@ gst_gl_filtershader_hcallback (GstGLFilter * filter, GstGLMemory * in_tex,
gst_gl_shader_use (shader);
-/* FIXME: propertise these */
+/* FIXME: turn these into properties */
gst_gl_shader_set_uniform_1i (shader, "tex", 0);
gst_gl_shader_set_uniform_1f (shader, "width",
GST_VIDEO_INFO_WIDTH (&filter->out_info));
@@ -538,7 +538,7 @@ gst_gl_filtershader_hcallback (GstGLFilter * filter, GstGLMemory * in_tex,
GST_VIDEO_INFO_HEIGHT (&filter->out_info));
gst_gl_shader_set_uniform_1f (shader, "time", filtershader->time);
-/* FIXME: propertise these */
+/* FIXME: turn these into properties */
filter->draw_attr_position_loc =
gst_gl_shader_get_attribute_location (shader, "a_position");
filter->draw_attr_texture_loc =
@@ -2029,7 +2029,7 @@ gst_glimage_sink_thread_init_redisplay (GstGLImageSink * gl_sink)
frag_stage = gst_glsl_stage_new_default_fragment (gl_sink->context);
}
if (!vert_stage || !frag_stage) {
-GST_ERROR_OBJECT (gl_sink, "Failed to retreive fragment shader for "
+GST_ERROR_OBJECT (gl_sink, "Failed to retrieve fragment shader for "
"texture target");
if (vert_stage)
gst_object_unref (vert_stage);
@@ -2240,7 +2240,7 @@ gst_glimage_sink_on_draw (GstGLImageSink * gl_sink)
gst_gl_sync_meta_wait (gl_sink->stored_sync_meta,
gst_gl_context_get_current ());
-/* make sure that the environnement is clean */
+/* make sure that the environment is clean */
gst_gl_context_clear_shader (gl_sink->context);
gl->BindTexture (gl_target, 0);
@@ -363,7 +363,7 @@ gst_gl_stereo_mix_aggregate_frames (GstVideoAggregator * vagg,
GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg);
/* If we're operating in frame-by-frame mode, push
* the primary view now, and let the parent class
-* push the remaining auxilliary view */
+* push the remaining auxiliary view */
if (GST_VIDEO_INFO_MULTIVIEW_MODE (&vagg->info) ==
GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME) {
/* Transfer the timestamps video-agg put on the aux buffer */
@@ -25,7 +25,7 @@
* pages.
* Before extracting the packets out of the ogg pages, we push the raw vorbis
* header packets to the decoder.
-* We don't use the incomming timestamps but use the ganulepos on the ogg pages
+* We don't use the incoming timestamps but use the ganulepos on the ogg pages
* directly.
* This parser only does ogg/vorbis for now.
*/
@@ -632,7 +632,7 @@ gst_ogg_demux_chain_peer (GstOggPad * pad, ogg_packet * packet,
beyond = ipad->map.n_index
&& ipad->map.index[ipad->map.n_index - 1].offset >= length;
if (beyond) {
-GST_WARNING_OBJECT (pad, "Index offsets beyong byte length");
+GST_WARNING_OBJECT (pad, "Index offsets beyond byte length");
if (ipad->discont) {
/* hole - the index is most likely screwed up */
GST_WARNING_OBJECT (ogg, "Discarding entire index");
@@ -1511,7 +1511,7 @@ gst_ogg_demux_estimate_bisection_target (GstOggDemux * ogg, float seek_quality)
GST_DEBUG_OBJECT (ogg, "Raw best guess: %" G_GINT64_FORMAT, best);
/* offset the guess down as we need to capture the start of the
-page we are targetting - but only do so if we did not undershoot
+page we are targeting - but only do so if we did not undershoot
last time, as we're likely to still do this time */
if (!ogg->seek_undershot) {
/* very small packets are packed on pages, so offset by at least
@@ -4026,7 +4026,7 @@ gst_ogg_demux_bisect_forward_serialno (GstOggDemux * ogg,
"bisect begin: %" G_GINT64_FORMAT ", searched: %" G_GINT64_FORMAT
", end %" G_GINT64_FORMAT ", chain: %p", begin, searched, end, chain);
-/* the below guards against garbage seperating the last and
+/* the below guards against garbage separating the last and
* first pages of two links. */
while (searched < endsearched) {
gint64 bisect;
@@ -97,7 +97,7 @@ struct _GstOggPad
gint64 first_granule; /* the granulepos of first page == first sample in next page */
GstClockTime first_time; /* the timestamp of the second page or granuletime of first page */
-GstClockTime position; /* position when last push occured; used to detect when we
+GstClockTime position; /* position when last push occurred; used to detect when we
* need to send a newsegment update event for sparse streams */
GList *continued;
@@ -882,7 +882,7 @@ gst_base_text_overlay_negotiate (GstBaseTextOverlay * overlay, GstCaps * caps)
}
if (upstream_has_meta || caps_has_meta) {
-/* Send caps immediatly, it's needed by GstBaseTransform to get a reply
+/* Send caps immediately, it's needed by GstBaseTransform to get a reply
* from allocation query */
ret = gst_pad_set_caps (overlay->srcpad, overlay_caps);
@@ -1855,7 +1855,7 @@ gst_base_text_overlay_render_pangocairo (GstBaseTextOverlay * overlay,
overlay->ink_rect.y = tmp.x;
overlay->ink_rect.width = tmp.height;
overlay->ink_rect.height = tmp.width;
-/* We want the top left corect, but we now have the top right */
+/* We want the top left correct, but we now have the top right */
overlay->ink_rect.x += overlay->ink_rect.width;
tmp = overlay->logical_rect;
@@ -1901,7 +1901,7 @@ gst_base_text_overlay_render_pangocairo (GstBaseTextOverlay * overlay,
if (overlay->use_vertical_render) {
gint tmp;
-/* tranlate to the center of the image, rotate, and tranlate the rotated
+/* translate to the center of the image, rotate, and translate the rotated
* image back to the right place */
cairo_matrix_translate (&cairo_matrix, unscaled_height / 2.0l,
unscaled_width / 2.0l);
@@ -70,7 +70,7 @@ struct _GstTheoraDec
GstVideoCodecState *input_state;
GstVideoCodecState *output_state;
-/* telemetry debuging options */
+/* telemetry debugging options */
gint telemetry_mv;
gint telemetry_mbmode;
gint telemetry_qi;
@@ -879,7 +879,7 @@ theora_enc_handle_frame (GstVideoEncoder * benc, GstVideoCodecFrame * frame)
enc = GST_THEORA_ENC (benc);
/* we keep track of two timelines.
-* - The timestamps from the incomming buffers, which we copy to the outgoing
+* - The timestamps from the incoming buffers, which we copy to the outgoing
* encoded buffers as-is. We need to do this as we simply forward the
* newsegment events.
* - The running_time of the buffers, which we use to construct the granulepos
@@ -31,7 +31,7 @@
* elements.
*
* Applications can set the tags to write using the #GstTagSetter interface.
-* Tags contained withing the vorbis bitstream will be picked up
+* Tags contained within the vorbis bitstream will be picked up
* automatically (and merged according to the merge mode set via the tag
* setter interface).
*
@@ -41,17 +41,17 @@ G_BEGIN_DECLS
*
* When the memory is mappable for read and write requests then it is assumes
* to be a fast path and so this caps feature should not be used. Though
-* according to the dma-buf protocal, while it is mapped it prevents the
+* according to the dma-buf protocol, while it is mapped it prevents the
* exporter to migrate the buffer.
*
* This caps feature should not serve at all the purpose of selecting the
* @GST_ALLOCATOR_DMABUF allocator during caps negotiation.
* When the exporter is the upstream element from the importer point of view,
-* the exporter should try to map the dma buffer at runtime (preferrably during
+* the exporter should try to map the dma buffer at runtime (preferably during
* decide_allocation phase). When it succeeds for #GST_MAP_READWRITE this caps
* feature should not be used. This allows scalers, color converts and any image
* processing filters to work directly on the dma buffer.
-* In this case the importer element should check all incomming memory using
+* In this case the importer element should check all incoming memory using
* gst_is_dmabuf_memory().
*
* Since: 1.12
@@ -1201,7 +1201,7 @@ gst_app_sink_set_emit_signals (GstAppSink * appsink, gboolean emit)
*
* Check if appsink will emit the "new-preroll" and "new-sample" signals.
*
-* Returns: %TRUE if @appsink is emiting the "new-preroll" and "new-sample"
+* Returns: %TRUE if @appsink is emitting the "new-preroll" and "new-sample"
* signals.
*/
gboolean
@@ -350,7 +350,7 @@ gst_app_src_class_init (GstAppSrcClass * klass)
/**
* GstAppSrc::max-latency:
*
-* The maximum latency of the source. A value of -1 means an unlimited amout
+* The maximum latency of the source. A value of -1 means an unlimited amount
* of latency.
*/
g_object_class_install_property (gobject_class, PROP_MAX_LATENCY,
@@ -1937,9 +1937,9 @@ gst_app_src_push_sample_internal (GstAppSrc * appsrc, GstSample * sample)
* When the block property is TRUE, this function can block until free
* space becomes available in the queue.
*
-* Returns: #GST_FLOW_OK when the buffer was successfuly queued.
+* Returns: #GST_FLOW_OK when the buffer was successfully queued.
* #GST_FLOW_FLUSHING when @appsrc is not PAUSED or PLAYING.
-* #GST_FLOW_EOS when EOS occured.
+* #GST_FLOW_EOS when EOS occurred.
*/
GstFlowReturn
gst_app_src_push_buffer (GstAppSrc * appsrc, GstBuffer * buffer)
@@ -1959,9 +1959,9 @@ gst_app_src_push_buffer (GstAppSrc * appsrc, GstBuffer * buffer)
* When the block property is TRUE, this function can block until free
* space becomes available in the queue.
*
-* Returns: #GST_FLOW_OK when the buffer list was successfuly queued.
+* Returns: #GST_FLOW_OK when the buffer list was successfully queued.
* #GST_FLOW_FLUSHING when @appsrc is not PAUSED or PLAYING.
-* #GST_FLOW_EOS when EOS occured.
+* #GST_FLOW_EOS when EOS occurred.
*
* Since: 1.14
*/
@@ -1988,9 +1988,9 @@ gst_app_src_push_buffer_list (GstAppSrc * appsrc, GstBufferList * buffer_list)
* When the block property is TRUE, this function can block until free
* space becomes available in the queue.
*
-* Returns: #GST_FLOW_OK when the buffer was successfuly queued.
+* Returns: #GST_FLOW_OK when the buffer was successfully queued.
* #GST_FLOW_FLUSHING when @appsrc is not PAUSED or PLAYING.
-* #GST_FLOW_EOS when EOS occured.
+* #GST_FLOW_EOS when EOS occurred.
*
* Since: 1.6
*
@@ -2033,7 +2033,7 @@ gst_app_src_push_sample_action (GstAppSrc * appsrc, GstSample * sample)
* Indicates to the appsrc element that the last buffer queued in the
* element is the last buffer of the stream.
*
-* Returns: #GST_FLOW_OK when the EOS was successfuly queued.
+* Returns: #GST_FLOW_OK when the EOS was successfully queued.
* #GST_FLOW_FLUSHING when @appsrc is not PAUSED or PLAYING.
*/
GstFlowReturn
@@ -87,7 +87,7 @@ G_BEGIN_DECLS
* This is expressed in caps by having a channel mask with no bits set.
*
* As another special case it is allowed to have two channels without a channel mask.
-* This implicitely means that this is a stereo stream with a front left and front right
+* This implicitly means that this is a stereo stream with a front left and front right
* channel.
*/
typedef enum {