gst-plugins-bad, commit 2ea10c1f
Authored Feb 23, 2019 by Tim-Philipp Müller
webrtcdsp: indent C++ sources
parent 73d5b642
Pipeline #20897 passed with stages in 25 minutes and 5 seconds
Showing 2 changed files with 200 additions and 108 deletions (+200 -108)

ext/webrtcdsp/gstwebrtcdsp.cpp        +196 -103
ext/webrtcdsp/gstwebrtcechoprobe.cpp    +4   -5
ext/webrtcdsp/gstwebrtcdsp.cpp
@@ -88,7 +88,7 @@ GST_DEBUG_CATEGORY (webrtc_dsp_debug);
#define DEFAULT_VOICE_DETECTION_LIKELIHOOD webrtc::VoiceDetection::kLowLikelihood

static GstStaticPadTemplate gst_webrtc_dsp_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-raw, "
@@ -104,7 +104,7 @@ GST_STATIC_PAD_TEMPLATE ("sink",
    );

static GstStaticPadTemplate gst_webrtc_dsp_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-raw, "
@@ -119,17 +119,24 @@ GST_STATIC_PAD_TEMPLATE ("src",
        "channels = (int) [1, MAX]")
    );

typedef webrtc::EchoCancellation::SuppressionLevel GstWebrtcEchoSuppressionLevel;
#define GST_TYPE_WEBRTC_ECHO_SUPPRESSION_LEVEL \
    (gst_webrtc_echo_suppression_level_get_type ())
static GType
gst_webrtc_echo_suppression_level_get_type (void)
{
  static GType suppression_level_type = 0;
  static const GEnumValue level_types[] = {
    {webrtc::EchoCancellation::kLowSuppression, "Low Suppression", "low"},
    {webrtc::EchoCancellation::kModerateSuppression,
        "Moderate Suppression", "moderate"},
    {webrtc::EchoCancellation::kHighSuppression, "high Suppression", "high"},
    {0, NULL, NULL}
  };
@@ -141,19 +148,26 @@ gst_webrtc_echo_suppression_level_get_type (void)
  return suppression_level_type;
}

typedef webrtc::NoiseSuppression::Level GstWebrtcNoiseSuppressionLevel;
#define GST_TYPE_WEBRTC_NOISE_SUPPRESSION_LEVEL \
    (gst_webrtc_noise_suppression_level_get_type ())
static GType
gst_webrtc_noise_suppression_level_get_type (void)
{
  static GType suppression_level_type = 0;
  static const GEnumValue level_types[] = {
    {webrtc::NoiseSuppression::kLow, "Low Suppression", "low"},
    {webrtc::NoiseSuppression::kModerate, "Moderate Suppression", "moderate"},
    {webrtc::NoiseSuppression::kHigh, "High Suppression", "high"},
    {webrtc::NoiseSuppression::kVeryHigh, "Very High Suppression",
        "very-high"},
    {0, NULL, NULL}
  };
@@ -164,15 +178,23 @@ gst_webrtc_noise_suppression_level_get_type (void)
  return suppression_level_type;
}

typedef webrtc::GainControl::Mode GstWebrtcGainControlMode;
#define GST_TYPE_WEBRTC_GAIN_CONTROL_MODE \
    (gst_webrtc_gain_control_mode_get_type ())
static GType
gst_webrtc_gain_control_mode_get_type (void)
{
  static GType gain_control_mode_type = 0;
  static const GEnumValue mode_types[] = {
    {webrtc::GainControl::kAdaptiveDigital, "Adaptive Digital",
        "adaptive-digital"},
    {webrtc::GainControl::kFixedDigital, "Fixed Digital", "fixed-digital"},
    {0, NULL, NULL}
  };
@@ -184,24 +206,34 @@ gst_webrtc_gain_control_mode_get_type (void)
  return gain_control_mode_type;
}

typedef webrtc::VoiceDetection::Likelihood GstWebrtcVoiceDetectionLikelihood;
#define GST_TYPE_WEBRTC_VOICE_DETECTION_LIKELIHOOD \
    (gst_webrtc_voice_detection_likelihood_get_type ())
static GType
gst_webrtc_voice_detection_likelihood_get_type (void)
{
  static GType likelihood_type = 0;
  static const GEnumValue likelihood_types[] = {
    {webrtc::VoiceDetection::kVeryLowLikelihood, "Very Low Likelihood",
        "very-low"},
    {webrtc::VoiceDetection::kLowLikelihood, "Low Likelihood", "low"},
    {webrtc::VoiceDetection::kModerateLikelihood, "Moderate Likelihood",
        "moderate"},
    {webrtc::VoiceDetection::kHighLikelihood, "High Likelihood", "high"},
    {0, NULL, NULL}
  };

  if (!likelihood_type) {
    likelihood_type =
        g_enum_register_static ("GstWebrtcVoiceDetectionLikelihood",
        likelihood_types);
  }
  return likelihood_type;
}
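The hunks above only re-indent the GLib boilerplate that exposes the webrtc enums (echo suppression level, noise suppression level, gain control mode, voice detection likelihood) as GTypes. For context, the sketch below shows how such a registered GType is typically consumed when the element installs an enum-typed property in its class_init; the property id, name, blurb and default value here are illustrative assumptions, not lines taken from this commit.

/* Illustrative sketch only, not part of the diff: wiring a registered enum
 * GType up to an element property.  PROP_ECHO_SUPPRESSION_LEVEL, the
 * property name/blurb and the default value are assumptions. */
g_object_class_install_property (gobject_class, PROP_ECHO_SUPPRESSION_LEVEL,
    g_param_spec_enum ("echo-suppression-level", "Echo Suppression Level",
        "Controls how aggressively echo is suppressed",
        GST_TYPE_WEBRTC_ECHO_SUPPRESSION_LEVEL,
        webrtc::EchoCancellation::kModerateSuppression,
        (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));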
@@ -236,42 +268,70 @@ enum
 */
struct _GstWebrtcDsp
{
  GstAudioFilter element;

  /* Protected by the object lock */
  GstAudioInfo info;
  gboolean interleaved;
  guint period_size;
  guint period_samples;
  gboolean stream_has_voice;

  /* Protected by the stream lock */
  GstAdapter *adapter;
  GstPlanarAudioAdapter *padapter;
  webrtc::AudioProcessing *apm;

  /* Protected by the object lock */
  gchar *probe_name;
  GstWebrtcEchoProbe *probe;

  /* Properties */
  gboolean high_pass_filter;
  gboolean echo_cancel;
  webrtc::EchoCancellation::SuppressionLevel echo_suppression_level;
  gboolean noise_suppression;
  webrtc::NoiseSuppression::Level noise_suppression_level;
  gboolean gain_control;
  gboolean experimental_agc;
  gboolean extended_filter;
  gboolean delay_agnostic;
  gint target_level_dbfs;
  gint compression_gain_db;
  gint startup_min_volume;
  gboolean limiter;
  webrtc::GainControl::Mode gain_control_mode;
  gboolean voice_detection;
  gint voice_detection_frame_size_ms;
  webrtc::VoiceDetection::Likelihood voice_detection_likelihood;
};

G_DEFINE_TYPE (GstWebrtcDsp, gst_webrtc_dsp, GST_TYPE_AUDIO_FILTER);
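The struct above backs the element's GObject properties (echo cancellation, noise suppression, gain control, voice detection and so on). Purely as context, and not as part of this commit, an application would typically toggle those features with g_object_set (); the property names below are believed to match the element, but gst-inspect-1.0 webrtcdsp is the authoritative reference.

/* Illustrative sketch only, not part of the diff.  Verify property names
 * with `gst-inspect-1.0 webrtcdsp` before relying on them. */
GstElement *dsp = gst_element_factory_make ("webrtcdsp", NULL);

g_object_set (dsp,
    "echo-cancel", TRUE,
    "noise-suppression", TRUE,
    "voice-detection", TRUE,
    NULL);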
@@ -279,7 +339,8 @@ G_DEFINE_TYPE (GstWebrtcDsp, gst_webrtc_dsp, GST_TYPE_AUDIO_FILTER);
static const gchar *
webrtc_error_to_string (gint err)
{
  const gchar *str = "unkown error";

  switch (err) {
    case webrtc::AudioProcessing::kNoError:
@@ -331,7 +392,8 @@ webrtc_error_to_string (gint err)
static GstBuffer *
gst_webrtc_dsp_take_buffer (GstWebrtcDsp * self)
{
  GstBuffer *buffer;
  GstClockTime timestamp;
  guint64 distance;
  gboolean at_discont;
@@ -343,7 +405,8 @@ gst_webrtc_dsp_take_buffer (GstWebrtcDsp * self)
    timestamp = gst_planar_audio_adapter_prev_pts (self->padapter, &distance);
  }
  timestamp +=
      gst_util_uint64_scale_int (distance, GST_SECOND, self->info.rate);

  if (self->interleaved) {
    buffer = gst_adapter_take_buffer (self->adapter, self->period_size);
@@ -367,14 +430,17 @@ gst_webrtc_dsp_take_buffer (GstWebrtcDsp * self)
  return buffer;
}

static GstFlowReturn
gst_webrtc_dsp_analyze_reverse_stream (GstWebrtcDsp * self,
    GstClockTime rec_time)
{
  GstWebrtcEchoProbe *probe = NULL;
  webrtc::AudioProcessing *apm;
  webrtc::AudioFrame frame;
  GstBuffer *buf = NULL;
  GstFlowReturn ret = GST_FLOW_OK;
  gint err, delay;
@@ -393,7 +459,8 @@ gst_webrtc_dsp_analyze_reverse_stream (GstWebrtcDsp * self,
    rec_time = GST_CLOCK_TIME_NONE;

again:
  delay =
      gst_webrtc_echo_probe_read (probe, rec_time, (gpointer) & frame, &buf);
  apm->set_stream_delay_ms (delay);

  if (delay < 0)
@@ -402,8 +469,8 @@ again:
  if (frame.sample_rate_hz_ != self->info.rate) {
    GST_ELEMENT_ERROR (self, STREAM, FORMAT,
        ("Echo Probe has rate %i , while the DSP is running at rate %i,"
            " use a caps filter to ensure those are the same.",
            frame.sample_rate_hz_, self->info.rate), (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }
@@ -412,10 +479,11 @@ again:
    webrtc::StreamConfig config (frame.sample_rate_hz_, frame.num_channels_,
        false);
    GstAudioBuffer abuf;
    float *const *data;

    gst_audio_buffer_map (&abuf, &self->info, buf, GST_MAP_READWRITE);
    data = (float *const *) abuf.planes;

    if ((err = apm->ProcessReverseStream (data, config, config, data)) < 0)
      GST_WARNING_OBJECT (self, "Reverse stream analyses failed: %s.",
          webrtc_error_to_string (err));
@@ -428,7 +496,7 @@ again:
  }

  if (self->delay_agnostic)
    goto again;

done:
  gst_object_unref (probe);
@@ -438,11 +506,13 @@ done:
}

static void
gst_webrtc_vad_post_message (GstWebrtcDsp * self, GstClockTime timestamp,
    gboolean stream_has_voice)
{
  GstBaseTransform *trans = GST_BASE_TRANSFORM_CAST (self);
  GstStructure *s;
  GstClockTime stream_time;

  stream_time = gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME,
@@ -459,9 +529,9 @@ gst_webrtc_vad_post_message (GstWebrtcDsp *self, GstClockTime timestamp,
      gst_message_new_element (GST_OBJECT (self), s));
}

static GstFlowReturn
gst_webrtc_dsp_process_stream (GstWebrtcDsp * self, GstBuffer * buffer)
{
  GstAudioBuffer abuf;
  webrtc::AudioProcessing *apm = self->apm;
@@ -484,7 +554,8 @@ gst_webrtc_dsp_process_stream (GstWebrtcDsp * self,
    if (err >= 0)
      memcpy (abuf.planes[0], frame.data_, self->period_size);
  } else {
    float *const *data = (float *const *) abuf.planes;
    webrtc::StreamConfig config (self->info.rate, self->info.channels, false);

    err = apm->ProcessStream (data, config, config, data);
@@ -498,7 +569,8 @@ gst_webrtc_dsp_process_stream (GstWebrtcDsp * self,
    gboolean stream_has_voice = apm->voice_detection ()->stream_has_voice ();

    if (stream_has_voice != self->stream_has_voice)
      gst_webrtc_vad_post_message (self, GST_BUFFER_PTS (buffer),
          stream_has_voice);
    self->stream_has_voice = stream_has_voice;
  }
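When voice detection is enabled, the code above posts an element message on the bus each time the voice-activity state flips. The fragment below is a hedged sketch of how an application might watch for it; the "voice-activity" structure name and the "stream-has-voice" field are assumptions based on the element's behaviour and are not visible in this excerpt.

/* Hedged sketch, not part of the diff: a GstBusFunc that reacts to the
 * element message posted by gst_webrtc_vad_post_message().  Structure and
 * field names are assumptions.  Register it with gst_bus_add_watch (). */
static gboolean
bus_watch_cb (GstBus * bus, GstMessage * msg, gpointer user_data)
{
  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ELEMENT) {
    const GstStructure *s = gst_message_get_structure (msg);

    if (s != NULL && gst_structure_has_name (s, "voice-activity")) {
      gboolean has_voice = FALSE;

      gst_structure_get_boolean (s, "stream-has-voice", &has_voice);
      g_print ("voice detected: %s\n", has_voice ? "yes" : "no");
    }
  }
  return TRUE;                  /* keep the watch installed */
}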
@@ -509,19 +581,20 @@ gst_webrtc_dsp_process_stream (GstWebrtcDsp * self,
  return GST_FLOW_OK;
}

static GstFlowReturn
gst_webrtc_dsp_submit_input_buffer (GstBaseTransform * btrans,
    gboolean is_discont, GstBuffer * buffer)
{
  GstWebrtcDsp *self = GST_WEBRTC_DSP (btrans);

  buffer = gst_buffer_make_writable (buffer);
  GST_BUFFER_PTS (buffer) = gst_segment_to_running_time (&btrans->segment,
      GST_FORMAT_TIME, GST_BUFFER_PTS (buffer));

  if (is_discont) {
    GST_DEBUG_OBJECT (self, "Received discont, clearing adapter.");
    if (self->interleaved)
      gst_adapter_clear (self->adapter);
    else
@@ -536,10 +609,12 @@ gst_webrtc_dsp_submit_input_buffer (GstBaseTransform * btrans,
  return GST_FLOW_OK;
}

static GstFlowReturn
gst_webrtc_dsp_generate_output (GstBaseTransform * btrans, GstBuffer ** outbuf)
{
  GstWebrtcDsp *self = GST_WEBRTC_DSP (btrans);
  GstFlowReturn ret;
  gboolean not_enough;
@@ -563,10 +638,12 @@ gst_webrtc_dsp_generate_output (GstBaseTransform * btrans, GstBuffer ** outbuf)
  return ret;
}

static gboolean
gst_webrtc_dsp_start (GstBaseTransform * btrans)
{
  GstWebrtcDsp *self = GST_WEBRTC_DSP (btrans);
  webrtc::Config config;

  GST_OBJECT_LOCK (self);
@@ -574,7 +651,8 @@ gst_webrtc_dsp_start (GstBaseTransform * btrans)
  config.Set < webrtc::ExtendedFilter >
      (new webrtc::ExtendedFilter (self->extended_filter));
  config.Set < webrtc::ExperimentalAgc >
      (new webrtc::ExperimentalAgc (self->experimental_agc,
          self->startup_min_volume));
  config.Set < webrtc::DelayAgnostic >
      (new webrtc::DelayAgnostic (self->delay_agnostic));
@@ -598,10 +676,12 @@ gst_webrtc_dsp_start (GstBaseTransform * btrans)
  return TRUE;
}

static gboolean
gst_webrtc_dsp_setup (GstAudioFilter * filter, const GstAudioInfo * info)
{
  GstWebrtcDsp *self = GST_WEBRTC_DSP (filter);
  webrtc::AudioProcessing *apm;
  webrtc::ProcessingConfig pconfig;
  GstAudioInfo probe_info = *info;
@@ -679,7 +759,8 @@ gst_webrtc_dsp_setup (GstAudioFilter * filter, const GstAudioInfo * info)
  }

  if (self->gain_control) {
    GEnumClass *mode_class = (GEnumClass *)
        g_type_class_ref (GST_TYPE_WEBRTC_GAIN_CONTROL_MODE);

    GST_DEBUG_OBJECT (self, "Enabling Digital Gain Control, target level "
@@ -698,20 +779,21 @@ gst_webrtc_dsp_setup (GstAudioFilter * filter, const GstAudioInfo * info)
  }

  if (self->voice_detection) {
    GEnumClass *likelihood_class = (GEnumClass *)
        g_type_class_ref (GST_TYPE_WEBRTC_VOICE_DETECTION_LIKELIHOOD);

    GST_DEBUG_OBJECT (self, "Enabling Voice Activity Detection, frame size "
        "%d milliseconds, likelihood: %s", self->voice_detection_frame_size_ms,
        g_enum_get_value (likelihood_class,
            self->voice_detection_likelihood)->value_name);
    g_type_class_unref (likelihood_class);

    self->stream_has_voice = FALSE;
    apm->voice_detection ()->Enable (true);
    apm->voice_detection ()->set_likelihood (self->voice_detection_likelihood);
    apm->voice_detection ()->
        set_frame_size_ms (self->voice_detection_frame_size_ms);
  }

  GST_OBJECT_UNLOCK (self);
@@ -744,10 +826,12 @@ initialize_failed:
  return FALSE;
}

static gboolean
gst_webrtc_dsp_stop (GstBaseTransform * btrans)
{
  GstWebrtcDsp *self = GST_WEBRTC_DSP (btrans);

  GST_OBJECT_LOCK (self);
@@ -771,7 +855,8 @@ static void
gst_webrtc_dsp_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  GstWebrtcDsp *self = GST_WEBRTC_DSP (object);

  GST_OBJECT_LOCK (self);
  switch (prop_id) {
@@ -845,7 +930,8 @@ static void
gst_webrtc_dsp_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec)
{
  GstWebrtcDsp *self = GST_WEBRTC_DSP (object);

  GST_OBJECT_LOCK (self);
  switch (prop_id) {
@@ -914,7 +1000,8 @@ gst_webrtc_dsp_get_property (GObject * object,
static void
gst_webrtc_dsp_finalize (GObject * object)
{
  GstWebrtcDsp *self = GST_WEBRTC_DSP (object);

  gst_object_unref (self->adapter);
  gst_object_unref (self->padapter);
@@ -934,10 +1021,14 @@ gst_webrtc_dsp_init (GstWebrtcDsp * self)
static void
gst_webrtc_dsp_class_init (GstWebrtcDspClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstBaseTransformClass *btrans_class = GST_BASE_TRANSFORM_CLASS (klass);
  GstAudioFilterClass *audiofilter_class = GST_AUDIO_FILTER_CLASS (klass);

  gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_webrtc_dsp_finalize);
  gobject_class->set_property =
      GST_DEBUG_FUNCPTR (gst_webrtc_dsp_set_property);
@@ -1053,7 +1144,7 @@ gst_webrtc_dsp_class_init (GstWebrtcDspClass * klass)
      PROP_COMPRESSION_GAIN_DB,
      g_param_spec_int ("compression-gain-db", "Compression Gain dB",
          "Sets the maximum |gain| the digital compression stage may apply, "
          "in dB.", 0, 90, DEFAULT_COMPRESSION_GAIN_DB,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
              G_PARAM_CONSTRUCT)));
@@ -1063,8 +1154,9 @@ gst_webrtc_dsp_class_init (GstWebrtcDspClass * klass)
          "At startup the experimental AGC moves the microphone volume up to "
          "|startup_min_volume| if the current microphone volume is set too "
          "low. No effect if experimental-agc isn't enabled.",
          12, 255, DEFAULT_STARTUP_MIN_VOLUME,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
              G_PARAM_CONSTRUCT)));

  g_object_class_install_property (gobject_class, PROP_LIMITER,
@@ -1115,7 +1207,8 @@ gst_webrtc_dsp_class_init (GstWebrtcDspClass * klass)
}

static gboolean
plugin_init (GstPlugin * plugin)
{
  GST_DEBUG_CATEGORY_INIT
ext/webrtcdsp/gstwebrtcechoprobe.cpp
@@ -42,7 +42,7 @@ GST_DEBUG_CATEGORY_EXTERN (webrtc_dsp_debug);
#define MAX_ADAPTER_SIZE (1*1024*1024)

static GstStaticPadTemplate gst_webrtc_echo_probe_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-raw, "
@@ -58,7 +58,7 @@ GST_STATIC_PAD_TEMPLATE ("sink",
    );

static GstStaticPadTemplate gst_webrtc_echo_probe_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-raw, "
@@ -426,13 +426,12 @@ copy:
          NULL);
    } else {
      ret = gst_planar_audio_adapter_take_buffer (self->padapter, size,
          GST_MAP_READWRITE);
    }
  } else {
    ret = gst_buffer_new_allocate (NULL, self->period_size, NULL);
    gst_buffer_memset (ret, 0, 0, self->period_size);
    gst_buffer_add_audio_meta (ret, &self->info, self->period_samples, NULL);
  }

  *buf = ret;
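The two files touched by this commit implement the webrtcdsp element and its companion webrtcechoprobe. As a hedged usage sketch, not taken from the commit itself, a minimal application could pair them as below; the device elements, property values and the fixed ten-second run are illustrative assumptions.

/* Hedged usage sketch, not part of the diff: webrtcdsp on the capture path,
 * webrtcechoprobe just before the sink on the playback path.  fakesink
 * stands in for whatever would consume the cleaned-up capture signal. */
#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstElement *pipeline;
  GError *error = NULL;

  gst_init (&argc, &argv);

  pipeline = gst_parse_launch (
      "autoaudiosrc ! webrtcdsp echo-cancel=true noise-suppression=true "
      "! fakesink  "
      "audiotestsrc wave=ticks ! webrtcechoprobe ! autoaudiosink",
      &error);
  if (error != NULL) {
    g_printerr ("Failed to build pipeline: %s\n", error->message);
    g_clear_error (&error);
    return 1;
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_usleep (10 * G_USEC_PER_SEC);       /* run for ten seconds */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}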