Skip to content
GitLab
Projects
Groups
Snippets
Help
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
gstreamer-rs
Project overview
Project overview
Details
Activity
Releases
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Issues
0
Issues
0
List
Boards
Labels
Service Desk
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Operations
Operations
Incidents
Environments
Packages & Registries
Packages & Registries
Container Registry
Analytics
Analytics
CI / CD
Repository
Value Stream
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
Zeeshan Ali
gstreamer-rs
Commits
3f0cd6fb
Commit
3f0cd6fb
authored
Jul 07, 2018
by
François Laignel
Committed by
Sebastian Dröge
Jul 30, 2018
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Toc: serde impl
parent
20834e47
Changes
3
Hide whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
393 additions
and
3 deletions
+393
-3
Gir_Gst.toml
Gir_Gst.toml
+21
-3
gstreamer/src/auto/enums.rs
gstreamer/src/auto/enums.rs
+3
-0
gstreamer/src/toc.rs
gstreamer/src/toc.rs
+369
-0
No files found.
Gir_Gst.toml
View file @
3f0cd6fb
...
...
@@ -51,9 +51,6 @@ generate = [
"Gst.TagSetter"
,
"Gst.QOSType"
,
"Gst.EventType"
,
"Gst.TocScope"
,
"Gst.TocEntryType"
,
"Gst.TocLoopType"
,
"Gst.TocSetter"
,
"Gst.ClockType"
,
"Gst.ElementFlags"
,
...
...
@@ -167,6 +164,27 @@ status = "generate"
name
=
"Serialize, Deserialize"
cfg_condition
=
"feature =
\"
ser_de
\"
"
[[object]]
name
=
"Gst.TocScope"
status
=
"generate"
[[object.derive]]
name
=
"Serialize, Deserialize"
cfg_condition
=
"feature =
\"
ser_de
\"
"
[[object]]
name
=
"Gst.TocEntryType"
status
=
"generate"
[[object.derive]]
name
=
"Serialize, Deserialize"
cfg_condition
=
"feature =
\"
ser_de
\"
"
[[object]]
name
=
"Gst.TocLoopType"
status
=
"generate"
[[object.derive]]
name
=
"Serialize, Deserialize"
cfg_condition
=
"feature =
\"
ser_de
\"
"
[[object]]
name
=
"Gst.Structure"
status
=
"manual"
...
...
gstreamer/src/auto/enums.rs
View file @
3f0cd6fb
...
...
@@ -2493,6 +2493,7 @@ impl SetValue for TaskState {
}
#[derive(Clone,
Copy,
Debug,
Eq,
PartialEq,
Hash)]
#[cfg_attr(feature
=
"ser_de"
,
derive(Serialize,
Deserialize))]
pub
enum
TocEntryType
{
Angle
,
Version
,
...
...
@@ -2565,6 +2566,7 @@ impl SetValue for TocEntryType {
}
#[derive(Clone,
Copy,
Debug,
Eq,
PartialEq,
Hash)]
#[cfg_attr(feature
=
"ser_de"
,
derive(Serialize,
Deserialize))]
pub
enum
TocLoopType
{
None
,
Forward
,
...
...
@@ -2628,6 +2630,7 @@ impl SetValue for TocLoopType {
}
#[derive(Clone,
Copy,
Debug,
Eq,
PartialEq,
Hash)]
#[cfg_attr(feature
=
"ser_de"
,
derive(Serialize,
Deserialize))]
pub
enum
TocScope
{
Global
,
Current
,
...
...
gstreamer/src/toc.rs
View file @
3f0cd6fb
...
...
@@ -263,6 +263,122 @@ impl fmt::Debug for TocEntryRef {
// SAFETY: justification not visible in this chunk — presumably TocEntryRef
// wraps an MT-safe (refcounted) GStreamer miniobject, as the sibling *Ref
// types do. NOTE(review): confirm against GstTocEntry's threading docs.
unsafe impl Sync for TocEntryRef {}
unsafe impl Send for TocEntryRef {}
// serde support for `Toc`/`TocEntry`, compiled only with the "ser_de" feature.
//
// Serialization is implemented on the borrowed `*Ref` types and forwarded from
// the owned types. Deserialization goes through private mirror structs
// (`TocDe`/`TocEntryDe`) that derive `Deserialize` and are then converted into
// the real, GStreamer-backed objects via `From`.
#[cfg(feature = "ser_de")]
mod serde {
    use serde::de::{Deserialize, Deserializer};
    use serde::ser::{Serialize, SerializeStruct, Serializer};

    use tags::*;

    use super::*;

    impl Serialize for TocRef {
        // A `Toc` serializes as a 3-field struct: scope, tags, entries.
        // Field names must match `TocDe` below so round-tripping works.
        fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
            let mut toc = serializer.serialize_struct("Toc", 3)?;
            toc.serialize_field("scope", &self.get_scope())?;
            toc.serialize_field("tags", &self.get_tags())?;
            toc.serialize_field("entries", &self.get_entries())?;
            toc.end()
        }
    }

    impl Serialize for Toc {
        // Delegate to the borrowed form.
        fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
            self.as_ref().serialize(serializer)
        }
    }

    impl Serialize for TocEntryRef {
        // A `TocEntry` serializes as a 6-field struct; field names must match
        // `TocEntryDe` below so round-tripping works.
        fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
            let mut toc_entry = serializer.serialize_struct("TocEntry", 6)?;
            toc_entry.serialize_field("entry_type", &self.get_entry_type())?;
            toc_entry.serialize_field("uid", &self.get_uid())?;
            toc_entry.serialize_field("start_stop", &self.get_start_stop_times())?;
            toc_entry.serialize_field("tags", &self.get_tags())?;
            toc_entry.serialize_field("loop_", &self.get_loop())?;
            toc_entry.serialize_field("sub_entries", &self.get_sub_entries())?;
            toc_entry.end()
        }
    }

    impl Serialize for TocEntry {
        // Delegate to the borrowed form.
        fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
            self.as_ref().serialize(serializer)
        }
    }

    // Deserialization mirror of `Toc`.
    #[derive(Deserialize)]
    struct TocDe {
        scope: TocScope,
        tags: Option<TagList>,
        entries: Vec<TocEntry>,
    }

    impl From<TocDe> for Toc {
        fn from(toc_de: TocDe) -> Self {
            // Destructure to move the fields out directly instead of
            // `take()`-ing and `drain()`-ing them from a `mut` binding.
            let TocDe { scope, tags, entries } = toc_de;

            let mut toc = Toc::new(scope);
            {
                let toc = toc.get_mut().unwrap();
                if let Some(tags) = tags {
                    toc.set_tags(tags);
                }
                for entry in entries {
                    toc.append_entry(entry);
                }
            }
            toc
        }
    }

    impl<'de> Deserialize<'de> for Toc {
        fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
            // `map` rather than `and_then(|x| Ok(x.into()))`
            // (clippy: bind_instead_of_map).
            TocDe::deserialize(deserializer).map(|toc_de| toc_de.into())
        }
    }

    // Deserialization mirror of `TocEntry`.
    #[derive(Deserialize)]
    struct TocEntryDe {
        entry_type: TocEntryType,
        uid: String,
        start_stop: Option<(i64, i64)>,
        tags: Option<TagList>,
        loop_: Option<(TocLoopType, i32)>,
        sub_entries: Vec<TocEntry>,
    }

    impl From<TocEntryDe> for TocEntry {
        fn from(toc_entry_de: TocEntryDe) -> Self {
            let TocEntryDe {
                entry_type,
                uid,
                start_stop,
                tags,
                loop_,
                sub_entries,
            } = toc_entry_de;

            let mut toc_entry = TocEntry::new(entry_type, uid.as_str());
            {
                let toc_entry = toc_entry.get_mut().unwrap();
                if let Some((start, stop)) = start_stop {
                    toc_entry.set_start_stop_times(start, stop);
                }
                if let Some(tags) = tags {
                    toc_entry.set_tags(tags);
                }
                if let Some((loop_type, repeat_count)) = loop_ {
                    toc_entry.set_loop(loop_type, repeat_count);
                }
                for sub_entry in sub_entries {
                    toc_entry.append_sub_entry(sub_entry);
                }
            }
            toc_entry
        }
    }

    impl<'de> Deserialize<'de> for TocEntry {
        fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
            TocEntryDe::deserialize(deserializer).map(|toc_entry_de| toc_entry_de.into())
        }
    }
}
#[cfg(test)]
mod
tests
{
use
super
::
*
;
...
...
@@ -311,4 +427,257 @@ mod tests {
assert
!
(
parent
.is_some
());
assert_eq!
(
parent
.unwrap
()
.get_entry_type
(),
TocEntryType
::
Chapter
);
}
#[cfg(feature
=
"ser_de"
)]
#[test]
fn
test_serialize
()
{
extern
crate
ron
;
use
tags
::
Title
;
use
super
::
*
;
::
init
()
.unwrap
();
let
mut
toc
=
Toc
::
new
(
TocScope
::
Global
);
{
let
toc
=
toc
.get_mut
()
.unwrap
();
let
mut
tags
=
TagList
::
new
();
tags
.get_mut
()
.unwrap
()
.add
::
<
Title
>
(
&
"toc"
,
TagMergeMode
::
Append
);
toc
.set_tags
(
tags
);
let
mut
toc_edition
=
TocEntry
::
new
(
TocEntryType
::
Edition
,
"edition"
);
{
let
toc_edition
=
toc_edition
.get_mut
()
.unwrap
();
toc_edition
.set_start_stop_times
(
0
,
15
);
let
mut
toc_chap_1
=
TocEntry
::
new
(
TocEntryType
::
Chapter
,
"chapter1"
);
{
let
toc_chap_1
=
toc_chap_1
.get_mut
()
.unwrap
();
toc_chap_1
.set_start_stop_times
(
0
,
10
);
let
mut
toc_chap_1_1
=
TocEntry
::
new
(
TocEntryType
::
Chapter
,
"chapter1.1"
);
{
let
toc_chap_1_1
=
toc_chap_1_1
.get_mut
()
.unwrap
();
toc_chap_1_1
.set_start_stop_times
(
0
,
4
);
let
mut
tags
=
TagList
::
new
();
tags
.get_mut
()
.unwrap
()
.add
::
<
Title
>
(
&
"chapter 1.1"
,
TagMergeMode
::
Append
);
toc_chap_1_1
.set_tags
(
tags
);
}
toc_chap_1
.append_sub_entry
(
toc_chap_1_1
);
let
mut
toc_chap_1_2
=
TocEntry
::
new
(
TocEntryType
::
Chapter
,
"chapter1.2"
);
{
let
toc_chap_1_2
=
toc_chap_1_2
.get_mut
()
.unwrap
();
toc_chap_1_2
.set_start_stop_times
(
4
,
10
);
let
mut
tags
=
TagList
::
new
();
tags
.get_mut
()
.unwrap
()
.add
::
<
Title
>
(
&
"chapter 1.2"
,
TagMergeMode
::
Append
);
toc_chap_1_2
.set_tags
(
tags
);
}
toc_chap_1
.append_sub_entry
(
toc_chap_1_2
);
}
toc_edition
.append_sub_entry
(
toc_chap_1
);
let
mut
toc_chap_2
=
TocEntry
::
new
(
TocEntryType
::
Chapter
,
"chapter2"
);
{
let
toc_chap_2
=
toc_chap_2
.get_mut
()
.unwrap
();
toc_chap_2
.set_start_stop_times
(
10
,
15
);
let
mut
tags
=
TagList
::
new
();
tags
.get_mut
()
.unwrap
()
.add
::
<
Title
>
(
&
"chapter 2"
,
TagMergeMode
::
Append
);
toc_chap_2
.set_tags
(
tags
);
}
toc_edition
.append_sub_entry
(
toc_chap_2
);
}
toc
.append_entry
(
toc_edition
);
}
// don't use newlines
let
mut
pretty_config
=
ron
::
ser
::
PrettyConfig
::
default
();
pretty_config
.new_line
=
""
.to_string
();
let
res
=
ron
::
ser
::
to_string_pretty
(
&
toc
,
pretty_config
);
assert_eq!
(
Ok
(
concat!
(
"("
,
" scope: Global,"
,
" tags: Some(["
,
" (
\"
title
\"
,
\"
toc
\"
),"
,
" ]),"
,
" entries: ["
,
" ("
,
" entry_type: Edition,"
,
" uid:
\"
edition
\"
,"
,
" start_stop: Some((0, 15)),"
,
" tags: None,"
,
" loop_: Some((None, 0)),"
,
" sub_entries: ["
,
" ("
,
" entry_type: Chapter,"
,
" uid:
\"
chapter1
\"
,"
,
" start_stop: Some((0, 10)),"
,
" tags: None,"
,
" loop_: Some((None, 0)),"
,
" sub_entries: ["
,
" ("
,
" entry_type: Chapter,"
,
" uid:
\"
chapter1.1
\"
,"
,
" start_stop: Some((0, 4)),"
,
" tags: Some(["
,
" (
\"
title
\"
,
\"
chapter 1.1
\"
),"
,
" ]),"
,
" loop_: Some((None, 0)),"
,
" sub_entries: ["
,
" ],"
,
" ),"
,
" ("
,
" entry_type: Chapter,"
,
" uid:
\"
chapter1.2
\"
,"
,
" start_stop: Some((4, 10)),"
,
" tags: Some(["
,
" (
\"
title
\"
,
\"
chapter 1.2
\"
),"
,
" ]),"
,
" loop_: Some((None, 0)),"
,
" sub_entries: ["
,
" ],"
,
" ),"
,
" ],"
,
" ),"
,
" ("
,
" entry_type: Chapter,"
,
" uid:
\"
chapter2
\"
,"
,
" start_stop: Some((10, 15)),"
,
" tags: Some(["
,
" (
\"
title
\"
,
\"
chapter 2
\"
),"
,
" ]),"
,
" loop_: Some((None, 0)),"
,
" sub_entries: ["
,
" ],"
,
" ),"
,
" ],"
,
" ),"
,
" ],"
,
")"
,
)
.to_owned
()
),
res
,
);
}
// Deserialization half of the round-trip check: parse a RON TOC and verify
// the resulting hierarchy, entry types, uids, start/stop times and tags.
#[cfg(feature = "ser_de")]
#[test]
fn test_deserialize() {
    extern crate ron;

    use tags::Title;

    ::init().unwrap();

    // Same structure as the one built by hand in test_serialize:
    // a Global TOC with one Edition containing chapter1{1.1, 1.2} and chapter2.
    // (RON is whitespace-insensitive, so the indentation here is cosmetic.)
    let toc_ron = r#"
        (
            scope: Global,
            tags: Some([
                ("title", "toc"),
            ]),
            entries: [
                (
                    entry_type: Edition,
                    uid: "edition",
                    start_stop: Some((0, 15)),
                    tags: None,
                    loop_: Some((None, 0)),
                    sub_entries: [
                        (
                            entry_type: Chapter,
                            uid: "chapter1",
                            start_stop: Some((0, 10)),
                            tags: None,
                            loop_: Some((None, 0)),
                            sub_entries: [
                                (
                                    entry_type: Chapter,
                                    uid: "chapter1.1",
                                    start_stop: Some((0, 4)),
                                    tags: Some([
                                        ("title", "chapter 1.1"),
                                    ]),
                                    loop_: Some((None, 0)),
                                    sub_entries: [
                                    ],
                                ),
                                (
                                    entry_type: Chapter,
                                    uid: "chapter1.2",
                                    start_stop: Some((4, 10)),
                                    tags: Some([
                                        ("title", "chapter 1.2"),
                                    ]),
                                    loop_: Some((None, 0)),
                                    sub_entries: [
                                    ],
                                ),
                            ],
                        ),
                        (
                            entry_type: Chapter,
                            uid: "chapter2",
                            start_stop: Some((10, 15)),
                            tags: Some([
                                ("title", "chapter 2"),
                            ]),
                            loop_: Some((None, 0)),
                            sub_entries: [
                            ],
                        ),
                    ],
                ),
            ],
        )
    "#;

    let toc: Toc = ron::de::from_str(toc_ron).unwrap();
    assert_eq!(toc.get_scope(), TocScope::Global);

    // Top level: exactly one entry, the edition.
    let entries = toc.get_entries();
    assert_eq!(1, entries.len());

    let edition = &entries[0];
    assert_eq!(TocEntryType::Edition, edition.get_entry_type());
    assert_eq!("edition", edition.get_uid());
    assert!(edition.get_tags().is_none());
    assert_eq!(Some((0, 15)), edition.get_start_stop_times());

    // The edition holds chapter1 and chapter2.
    let sub_entries = edition.get_sub_entries();
    assert_eq!(2, sub_entries.len());

    let chapter1 = &sub_entries[0];
    assert_eq!(TocEntryType::Chapter, chapter1.get_entry_type());
    assert_eq!("chapter1", chapter1.get_uid());
    assert!(chapter1.get_tags().is_none());
    assert_eq!(Some((0, 10)), chapter1.get_start_stop_times());

    let chap1_sub_entries = chapter1.get_sub_entries();
    // Bug fix: this previously re-asserted `sub_entries.len()` (the edition's
    // children) instead of checking chapter1's own sub-entries; both happen to
    // be 2, so the copy-paste went unnoticed.
    assert_eq!(2, chap1_sub_entries.len());

    let chapter1_1 = &chap1_sub_entries[0];
    assert_eq!(TocEntryType::Chapter, chapter1_1.get_entry_type());
    assert_eq!("chapter1.1", chapter1_1.get_uid());
    assert_eq!(Some((0, 4)), chapter1_1.get_start_stop_times());
    let tags = chapter1_1.get_tags().unwrap();
    assert_eq!(
        Some("chapter 1.1"),
        tags.get_index::<Title>(0).unwrap().get()
    );
    assert_eq!(0, chapter1_1.get_sub_entries().len());

    let chapter1_2 = &chap1_sub_entries[1];
    assert_eq!(TocEntryType::Chapter, chapter1_2.get_entry_type());
    assert_eq!("chapter1.2", chapter1_2.get_uid());
    assert_eq!(Some((4, 10)), chapter1_2.get_start_stop_times());
    let tags = chapter1_2.get_tags().unwrap();
    assert_eq!(
        Some("chapter 1.2"),
        tags.get_index::<Title>(0).unwrap().get()
    );
    assert_eq!(0, chapter1_2.get_sub_entries().len());

    let chapter2 = &sub_entries[1];
    assert_eq!(TocEntryType::Chapter, chapter2.get_entry_type());
    assert_eq!("chapter2", chapter2.get_uid());
    let tags = chapter2.get_tags().unwrap();
    assert_eq!(
        Some("chapter 2"),
        tags.get_index::<Title>(0).unwrap().get()
    );
    assert_eq!(Some((10, 15)), chapter2.get_start_stop_times());
    assert_eq!(0, chapter2.get_sub_entries().len());
}
}
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment