Jean-Marie Mineau 2023-07-21 11:36:33 +02:00
commit ab1d6f4f8c
9 changed files with 1452 additions and 0 deletions

1
.gitignore vendored Normal file

@@ -0,0 +1 @@
/target

55
Cargo.lock generated Normal file

@@ -0,0 +1,55 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "androscalpel_serializer"
version = "0.1.0"
dependencies = [
"androscalpel_serializer_derive",
]
[[package]]
name = "androscalpel_serializer_derive"
version = "0.1.0"
dependencies = [
"androscalpel_serializer",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "proc-macro2"
version = "1.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0"
dependencies = [
"proc-macro2",
]
[[package]]
name = "syn"
version = "2.0.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b60f673f44a8255b9c8c657daf66a596d435f2da81a555b06dc644d080ba45e0"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c"

6
Cargo.toml Normal file

@@ -0,0 +1,6 @@
[workspace]
members = [
"androscalpel_serializer",
"androscalpel_serializer_derive",
]

2
androscalpel_serializer/.gitignore vendored Normal file

@@ -0,0 +1,2 @@
/target
/Cargo.lock

7
androscalpel_serializer/Cargo.toml Normal file

@@ -0,0 +1,7 @@
[package]
name = "androscalpel_serializer"
version = "0.1.0"
edition = "2021"
[dependencies]
androscalpel_serializer_derive = { path = "../androscalpel_serializer_derive" }

846
androscalpel_serializer/src/lib.rs Normal file

@@ -0,0 +1,846 @@
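//! Serialization primitives for binary formats.
//!
//! The [`Serializable`] trait describes types that can be written to and read back from a byte
//! stream. The implementations for the fixed-size integer types use big-endian byte order, and
//! the derive macro from `androscalpel_serializer_derive` (re-exported below) composes them
//! field by field. A minimal sketch (the `Header` struct is illustrative, not part of the crate):
//!
//! ```
//! use androscalpel_serializer::*;
//!
//! #[derive(Serializable, Debug, PartialEq, Eq)]
//! struct Header {
//!     magic: u32,
//!     version: u16,
//! }
//!
//! let h = Header { magic: 0x12345678, version: 2 };
//! let bytes = vec![0x12, 0x34, 0x56, 0x78, 0x00, 0x02];
//! assert_eq!(h.serialize_to_vec().unwrap(), bytes);
//! assert_eq!(Header::deserialize_from_slice(&bytes).unwrap(), h);
//! ```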
use std::io::{Cursor, Read, Seek, SeekFrom, Write};
pub use androscalpel_serializer_derive::*;
#[derive(Debug, PartialEq, Eq)]
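/// Errors returned by the serialization and deserialization routines.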
pub enum Error {
InputTooSmall(String), // TODO: find a better name
SerializationError(String),
}
pub type Result<T> = core::result::Result<T, Error>;
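/// Deserialization inputs must be seekable as well as readable: the `SerializableUntil`
/// implementations peek ahead for the terminator value and rewind when it is not found.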
pub trait ReadSeek: Read + Seek {}
impl<T: Read + Seek> ReadSeek for T {}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
Self::InputTooSmall(msg) => write!(f, "Error: {}", msg),
Self::SerializationError(msg) => write!(f, "Error: {}", msg),
}
}
}
impl std::error::Error for Error {}
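/// A type that can be written to and read back from a byte stream.
///
/// `serialize_to_vec` and `deserialize_from_slice` are convenience wrappers that work on
/// in-memory buffers via [`std::io::Cursor`].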
pub trait Serializable {
fn serialize(&self, output: &mut dyn Write) -> Result<()>;
fn deserialize(input: &mut dyn ReadSeek) -> Result<Self>
where
Self: Sized;
fn size(&self) -> usize;
fn serialize_to_vec(&self) -> Result<Vec<u8>> {
let mut output = Cursor::new(Vec::<u8>::new());
self.serialize(&mut output)?;
Ok(output.into_inner())
}
fn deserialize_from_slice(input: &[u8]) -> Result<Self>
where
Self: Sized,
{
let mut buffer = Cursor::new(input);
Self::deserialize(&mut buffer)
}
}
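/// A sequence of `D` values terminated by a specific `U` value (`end_val`).
///
/// On deserialization, elements are read until `end_val` is encountered; the terminator is
/// consumed from the input but not kept in the result.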
pub trait SerializableUntil<D: Serializable, U: Serializable + Eq> {
fn serialize(&self, output: &mut dyn Write, end_val: U) -> Result<()>;
fn deserialize(input: &mut dyn ReadSeek, end_val: U) -> Result<Self>
where
Self: Sized;
fn size(&self, end_val: U) -> usize;
fn serialize_to_vec(&self, end_val: U) -> Result<Vec<u8>> {
let mut output = Cursor::new(Vec::<u8>::new());
self.serialize(&mut output, end_val)?;
Ok(output.into_inner())
}
fn deserialize_from_slice(input: &[u8], end_val: U) -> Result<Self>
where
Self: Sized,
{
let mut buffer = Cursor::new(input);
Self::deserialize(&mut buffer, end_val)
}
}
impl<D: Serializable, U: Serializable + Eq> SerializableUntil<D, U> for Vec<D> {
fn serialize(&self, output: &mut dyn Write, end_val: U) -> Result<()> {
for data in self {
data.serialize(output)?;
}
end_val.serialize(output)
}
fn deserialize(input: &mut dyn ReadSeek, end_val: U) -> Result<Self>
where
Self: Sized,
{
let mut data = Self::new();
loop {
let pos = input.stream_position().map_err(|err| {
Error::SerializeationError(format!("Failled to get position in steam: {err}"))
})?;
match U::deserialize(input) {
Ok(val) if val == end_val => break Ok(data),
Err(err) => break Err(err),
Ok(_) => input.seek(SeekFrom::Start(pos)).map_err(|err| {
Error::SerializationError(format!(
"Failed to seek to position in stream: {err}"
))
})?,
};
data.push(D::deserialize(input)?);
}
}
fn size(&self, end_val: U) -> usize {
self.iter().map(|data| data.size()).sum::<usize>() + end_val.size()
}
}
impl Serializable for u8 {
fn serialize(&self, output: &mut dyn Write) -> Result<()> {
output.write_all(&self.to_be_bytes()).map_err(|err| {
Error::SerializeationError(format!("Failed to write serialize u8 to output: {err}"))
})?;
Ok(())
}
fn deserialize(input: &mut dyn ReadSeek) -> Result<Self> {
let mut buffer = [0u8; 1];
input.read_exact(&mut buffer).map_err(|_| {
Error::InputTooSmall("Failed to read all bytes for u8 from the input".into())
})?;
Ok(Self::from_be_bytes(buffer))
}
fn size(&self) -> usize {
1
}
}
impl Serializable for i8 {
fn serialize(&self, output: &mut dyn Write) -> Result<()> {
output.write_all(&self.to_be_bytes()).map_err(|err| {
Error::SerializeationError(format!("Failed to write serialize i8 to output: {err}"))
})?;
Ok(())
}
fn deserialize(input: &mut dyn ReadSeek) -> Result<Self> {
let mut buffer = [0u8; 1];
input.read_exact(&mut buffer).map_err(|_| {
Error::InputTooSmall("Failed to read all bytes for i8 from the input".into())
})?;
Ok(Self::from_be_bytes(buffer))
}
fn size(&self) -> usize {
1
}
}
impl Serializable for u16 {
fn serialize(&self, output: &mut dyn Write) -> Result<()> {
output.write_all(&self.to_be_bytes()).map_err(|err| {
Error::SerializeationError(format!("Failed to write serialize u16 to output: {err}"))
})?;
Ok(())
}
fn deserialize(input: &mut dyn ReadSeek) -> Result<Self> {
let mut buffer = [0u8; 2];
input.read_exact(&mut buffer).map_err(|_| {
Error::InputTooSmall("Failed to read all bytes for u16 from the input".into())
})?;
Ok(Self::from_be_bytes(buffer))
}
fn size(&self) -> usize {
2
}
}
impl Serializable for i16 {
fn serialize(&self, output: &mut dyn Write) -> Result<()> {
output.write_all(&self.to_be_bytes()).map_err(|err| {
Error::SerializeationError(format!("Failed to write serialize i16 to output: {err}"))
})?;
Ok(())
}
fn deserialize(input: &mut dyn ReadSeek) -> Result<Self> {
let mut buffer = [0u8; 2];
input.read_exact(&mut buffer).map_err(|_| {
Error::InputTooSmall("Failed to read all bytes for i16 from the input".into())
})?;
Ok(Self::from_be_bytes(buffer))
}
fn size(&self) -> usize {
2
}
}
impl Serializable for u32 {
fn serialize(&self, output: &mut dyn Write) -> Result<()> {
output.write_all(&self.to_be_bytes()).map_err(|err| {
Error::SerializeationError(format!("Failed to write serialize u32 to output: {err}"))
})?;
Ok(())
}
fn deserialize(input: &mut dyn ReadSeek) -> Result<Self> {
let mut buffer = [0u8; 4];
input.read_exact(&mut buffer).map_err(|_| {
Error::InputTooSmall("Failed to read all bytes for u32 from the input".into())
})?;
Ok(Self::from_be_bytes(buffer))
}
fn size(&self) -> usize {
4
}
}
impl Serializable for i32 {
fn serialize(&self, output: &mut dyn Write) -> Result<()> {
output.write_all(&self.to_be_bytes()).map_err(|err| {
Error::SerializeationError(format!("Failed to write serialize i32 to output: {err}"))
})?;
Ok(())
}
fn deserialize(input: &mut dyn ReadSeek) -> Result<Self> {
let mut buffer = [0u8; 4];
input.read_exact(&mut buffer).map_err(|_| {
Error::InputTooSmall("Failed to read all bytes for i32 from the input".into())
})?;
Ok(Self::from_be_bytes(buffer))
}
fn size(&self) -> usize {
4
}
}
impl Serializable for u64 {
fn serialize(&self, output: &mut dyn Write) -> Result<()> {
output.write_all(&self.to_be_bytes()).map_err(|err| {
Error::SerializeationError(format!("Failed to write serialize u64 to output: {err}"))
})?;
Ok(())
}
fn deserialize(input: &mut dyn ReadSeek) -> Result<Self> {
let mut buffer = [0u8; 8];
input.read_exact(&mut buffer).map_err(|_| {
Error::InputTooSmall("Failed to read all bytes for u64 from the input".into())
})?;
Ok(Self::from_be_bytes(buffer))
}
fn size(&self) -> usize {
8
}
}
impl Serializable for i64 {
fn serialize(&self, output: &mut dyn Write) -> Result<()> {
output.write_all(&self.to_be_bytes()).map_err(|err| {
Error::SerializeationError(format!("Failed to write serialize i64 to output: {err}"))
})?;
Ok(())
}
fn deserialize(input: &mut dyn ReadSeek) -> Result<Self> {
let mut buffer = [0u8; 8];
input.read_exact(&mut buffer).map_err(|_| {
Error::InputTooSmall("Failed to read all bytes for i64 from the input".into())
})?;
Ok(Self::from_be_bytes(buffer))
}
fn size(&self) -> usize {
8
}
}
impl Serializable for u128 {
fn serialize(&self, output: &mut dyn Write) -> Result<()> {
output.write_all(&self.to_be_bytes()).map_err(|err| {
Error::SerializeationError(format!("Failed to write serialize u128 to output: {err}"))
})?;
Ok(())
}
fn deserialize(input: &mut dyn ReadSeek) -> Result<Self> {
let mut buffer = [0u8; 16];
input.read_exact(&mut buffer).map_err(|_| {
Error::InputTooSmall("Failed to read all bytes for u128 from the input".into())
})?;
Ok(Self::from_be_bytes(buffer))
}
fn size(&self) -> usize {
16
}
}
impl Serializable for i128 {
fn serialize(&self, output: &mut dyn Write) -> Result<()> {
output.write_all(&self.to_be_bytes()).map_err(|err| {
Error::SerializeationError(format!("Failed to write serialize i128 to output: {err}"))
})?;
Ok(())
}
fn deserialize(input: &mut dyn ReadSeek) -> Result<Self> {
let mut buffer = [0u8; 16];
input.read_exact(&mut buffer).map_err(|_| {
Error::InputTooSmall("Failed to read all bytes for i128 from the input".into())
})?;
Ok(Self::from_be_bytes(buffer))
}
fn size(&self) -> usize {
16
}
}
#[cfg(test)]
mod test {
use super::*;
// Hacky hack to refer to the crate by its own name
use crate as androscalpel_serializer;
#[test]
fn serialize_u8() {
assert_eq!(42u8.serialize_to_vec().unwrap(), vec![42u8]);
}
#[test]
fn deserialize_u8() {
assert_eq!(u8::deserialize_from_slice(&[42u8]).unwrap(), 42u8);
}
#[test]
fn size_u8() {
assert_eq!(42u8.size(), 1);
}
#[test]
fn serialize_u16() {
assert_eq!(0x1234u16.serialize_to_vec().unwrap(), vec![0x12u8, 0x34u8]);
}
#[test]
fn deserialize_u16() {
assert_eq!(
u16::deserialize_from_slice(&[0x12u8, 0x34u8]).unwrap(),
0x1234u16
);
}
#[test]
fn size_u16() {
assert_eq!(0x1234u16.size(), 2);
}
#[test]
fn serialize_u32() {
assert_eq!(
0x12345678u32.serialize_to_vec().unwrap(),
vec![0x12u8, 0x34u8, 0x56u8, 0x78u8]
);
}
#[test]
fn deserialize_u32() {
assert_eq!(
u32::deserialize_from_slice(&[0x12u8, 0x34u8, 0x56u8, 0x78u8]).unwrap(),
0x12345678u32
);
}
#[test]
fn size_u32() {
assert_eq!(0x12345678u32.size(), 4);
}
#[test]
fn serialize_u64() {
assert_eq!(
0x123456789ABCDEF0u64.serialize_to_vec().unwrap(),
vec![0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8]
);
}
#[test]
fn deserialize_u64() {
assert_eq!(
u64::deserialize_from_slice(&[
0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8
])
.unwrap(),
0x123456789ABCDEF0u64
);
}
#[test]
fn size_u64() {
assert_eq!(0x123456789ABCDEF0u64.size(), 8);
}
#[test]
fn serialize_u128() {
assert_eq!(
0x123456789ABCDEF0123456789ABCDEF0u128
.serialize_to_vec()
.unwrap(),
vec![
0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8,
0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8
]
);
}
#[test]
fn deserialize_u128() {
assert_eq!(
u128::deserialize_from_slice(&[
0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8,
0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8
])
.unwrap(),
0x123456789ABCDEF0123456789ABCDEF0u128
);
}
#[test]
fn size_u128() {
assert_eq!(0x123456789ABCDEF0123456789ABCDEF0u128.size(), 16);
}
#[derive(Serializable, Debug, PartialEq, Eq)]
struct NamedComposedTest {
a: u8,
b: u16,
c: u32,
d: u64,
f: u128,
}
#[derive(Serializable, Debug, PartialEq, Eq)]
struct UnamedComposedTest(u8, u16, u32, u64, u128);
#[test]
fn serialize_unnamed_derived() {
let t = UnamedComposedTest(
42u8,
0x1234u16,
0x12345678u32,
0x123456789ABCDEF0u64,
0x123456789ABCDEF0123456789ABCDEF0u128,
);
assert_eq!(
t.serialize_to_vec().unwrap(),
vec![
42u8, 0x12u8, 0x34u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x12u8, 0x34u8, 0x56u8,
0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8,
0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8,
0xF0u8
]
);
}
#[test]
fn deserialize_unnamed_derived() {
let t = UnamedComposedTest(
42u8,
0x1234u16,
0x12345678u32,
0x123456789ABCDEF0u64,
0x123456789ABCDEF0123456789ABCDEF0u128,
);
let serialized = vec![
42u8, 0x12u8, 0x34u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8,
0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8,
0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8,
];
assert_eq!(
UnamedComposedTest::deserialize_from_slice(&serialized).unwrap(),
t
);
}
#[test]
fn size_unnamed_derived() {
let t = UnamedComposedTest(
42u8,
0x1234u16,
0x12345678u32,
0x123456789ABCDEF0u64,
0x123456789ABCDEF0123456789ABCDEF0u128,
);
assert_eq!(t.size(), 1 + 2 + 4 + 8 + 16);
}
#[derive(Serializable, Debug, PartialEq, Eq)]
struct NamedArrayComposedTest {
a: [u8; 1],
b: [u16; 2],
c: [u32; 3],
d: [u64; 4],
e: [u128; 5],
}
#[derive(Serializable, Debug, PartialEq, Eq)]
struct UnamedArrayComposedTest([u8; 1], [u16; 2], [u32; 3], [u64; 4], [u128; 5]);
#[test]
fn serialize_array_unamed_derived() {
let t = UnamedArrayComposedTest(
[42u8],
[0x1234u16, 0x1234u16],
[0x12345678u32, 0x12345678u32, 0x12345678u32],
[
0x123456789ABCDEF0u64,
0x123456789ABCDEF0u64,
0x123456789ABCDEF0u64,
0x123456789ABCDEF0u64,
],
[
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
],
);
assert_eq!(
t.serialize_to_vec().unwrap(),
vec![
42u8, 0x12u8, 0x34u8, 0x12u8, 0x34u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x12u8,
0x34u8, 0x56u8, 0x78u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x12u8, 0x34u8, 0x56u8,
0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8,
0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8,
0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8,
0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8,
0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8,
0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8,
0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8,
0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8,
0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8,
0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8,
0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8
]
);
}
#[test]
fn deserialize_array_unamed_derived() {
let t = UnamedArrayComposedTest(
[42u8],
[0x1234u16, 0x1234u16],
[0x12345678u32, 0x12345678u32, 0x12345678u32],
[
0x123456789ABCDEF0u64,
0x123456789ABCDEF0u64,
0x123456789ABCDEF0u64,
0x123456789ABCDEF0u64,
],
[
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
],
);
let serialized = vec![
42u8, 0x12u8, 0x34u8, 0x12u8, 0x34u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x12u8, 0x34u8,
0x56u8, 0x78u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8,
0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8,
0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8,
0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8,
0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8,
0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8,
0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8,
0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8,
0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8,
0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8,
0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8,
];
assert_eq!(
UnamedArrayComposedTest::deserialize_from_slice(&serialized).unwrap(),
t
);
}
#[test]
fn size_array_unamed_derived() {
let t = UnamedArrayComposedTest(
[42u8],
[0x1234u16, 0x1234u16],
[0x12345678u32, 0x12345678u32, 0x12345678u32],
[
0x123456789ABCDEF0u64,
0x123456789ABCDEF0u64,
0x123456789ABCDEF0u64,
0x123456789ABCDEF0u64,
],
[
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
],
);
assert_eq!(t.size(), 1 + 2 * 2 + 4 * 3 + 8 * 4 + 16 * 5);
}
#[test]
fn serialize_array_named_derived() {
let t = NamedArrayComposedTest {
a: [42u8],
b: [0x1234u16, 0x1234u16],
c: [0x12345678u32, 0x12345678u32, 0x12345678u32],
d: [
0x123456789ABCDEF0u64,
0x123456789ABCDEF0u64,
0x123456789ABCDEF0u64,
0x123456789ABCDEF0u64,
],
e: [
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
],
};
assert_eq!(
t.serialize_to_vec().unwrap(),
vec![
42u8, 0x12u8, 0x34u8, 0x12u8, 0x34u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x12u8,
0x34u8, 0x56u8, 0x78u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x12u8, 0x34u8, 0x56u8,
0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8,
0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8,
0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8,
0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8,
0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8,
0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8,
0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8,
0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8,
0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8,
0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8,
0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8
]
);
}
#[test]
fn deserialize_array_named_derived() {
let t = NamedArrayComposedTest {
a: [42u8],
b: [0x1234u16, 0x1234u16],
c: [0x12345678u32, 0x12345678u32, 0x12345678u32],
d: [
0x123456789ABCDEF0u64,
0x123456789ABCDEF0u64,
0x123456789ABCDEF0u64,
0x123456789ABCDEF0u64,
],
e: [
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
],
};
let serialized = vec![
42u8, 0x12u8, 0x34u8, 0x12u8, 0x34u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x12u8, 0x34u8,
0x56u8, 0x78u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8,
0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8,
0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8,
0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8,
0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8,
0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8,
0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8,
0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8,
0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8,
0xBCu8, 0xDEu8, 0xF0u8, 0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8,
0x12u8, 0x34u8, 0x56u8, 0x78u8, 0x9Au8, 0xBCu8, 0xDEu8, 0xF0u8,
];
assert_eq!(
NamedArrayComposedTest::deserialize_from_slice(&serialized).unwrap(),
t
);
}
#[test]
fn size_array_named_derived() {
let t = NamedArrayComposedTest {
a: [42u8],
b: [0x1234u16, 0x1234u16],
c: [0x12345678u32, 0x12345678u32, 0x12345678u32],
d: [
0x123456789ABCDEF0u64,
0x123456789ABCDEF0u64,
0x123456789ABCDEF0u64,
0x123456789ABCDEF0u64,
],
e: [
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
0x123456789ABCDEF0123456789ABCDEF0u128,
],
};
assert_eq!(t.size(), 1 + 2 * 2 + 4 * 3 + 8 * 4 + 16 * 5);
}
#[test]
fn size_vec_serializable_until() {
#[derive(Serializable, PartialEq, Eq)]
struct UntilArray([u8; 3]);
let v = vec![0u8, 1u8, 2u8, 3u8];
assert_eq!(v.size(UntilArray([0, 0, 0])), 7);
assert_eq!(v.size(0u8), 5);
}
#[test]
fn serialize_vec_serializable_until() {
#[derive(Serializable, PartialEq, Eq)]
struct UntilArray([u8; 3]);
let v = vec![0u8, 1u8, 2u8, 3u8];
assert_eq!(
v.serialize_to_vec(UntilArray([0, 0, 0])).unwrap(),
vec![0u8, 1u8, 2u8, 3u8, 0u8, 0u8, 0u8]
);
assert_eq!(
v.serialize_to_vec(0u8).unwrap(),
vec![0u8, 1u8, 2u8, 3u8, 0u8]
);
}
#[test]
fn deserialize_vec_serializable_until() {
#[derive(Serializable, PartialEq, Eq)]
struct UntilArray([u8; 3]);
let v1 = vec![0u8, 1u8, 2u8, 3u8];
let serialized_1 = &[0u8, 1u8, 2u8, 3u8, 0u8, 0u8, 0u8];
let serialized_2 = &[0u8, 1u8, 2u8, 3u8, 0u8];
assert_eq!(
v1,
Vec::<u8>::deserialize_from_slice(serialized_1, UntilArray([0, 0, 0])).unwrap()
);
assert_eq!(
Vec::<u8>::new(),
Vec::<u8>::deserialize_from_slice(serialized_2, 0u8).unwrap()
);
}
#[derive(Serializable, PartialEq, Eq, Debug)]
struct UntilArray2([u8; 2]);
#[derive(Serializable, Debug, PartialEq, Eq)]
struct SerializableUntilNamed {
pr: u16,
#[until(u8, UntilArray2, UntilArray2([0u8, 0u8]))]
v: Vec<u8>,
ps: u32,
}
#[test]
fn size_named_serializable_until() {
let v = SerializableUntilNamed {
pr: 0,
v: vec![1, 2, 3],
ps: 0,
};
assert_eq!(v.size(), 2 + 3 + 2 + 4);
}
#[test]
fn serialize_named_serializable_until() {
let v = SerializableUntilNamed {
pr: 0,
v: vec![1, 2, 3],
ps: 0x80000000,
};
assert_eq!(
v.serialize_to_vec().unwrap(),
vec![0, 0, 1, 2, 3, 0, 0, 0x80, 0, 0, 0],
);
}
#[test]
fn deserialize_named_serializable_until() {
let v = SerializableUntilNamed {
pr: 0,
v: vec![1, 2, 3],
ps: 0x80000000,
};
let s = &[0, 0, 1, 2, 3, 0, 0, 0x80, 0, 0, 0];
assert_eq!(
v,
SerializableUntilNamed::deserialize_from_slice(s).unwrap()
);
}
#[derive(Serializable, PartialEq, Eq, Debug)]
#[prefix_type(u8)]
enum TestEnum {
#[prefix(0)]
Zero,
#[prefix(1)]
One(u32),
#[prefix(2)]
Two { a: u16, b: u32 },
}
#[test]
fn size_enum() {
assert_eq!(TestEnum::Zero.size(), 1);
assert_eq!(TestEnum::One(42).size(), 5);
assert_eq!(TestEnum::Two { a: 1, b: 2 }.size(), 7);
}
#[test]
fn serialize_enum() {
assert_eq!(TestEnum::Zero.serialize_to_vec().unwrap(), vec![0u8]);
assert_eq!(
TestEnum::One(42).serialize_to_vec().unwrap(),
vec![1u8, 0u8, 0u8, 0u8, 42u8]
);
assert_eq!(
TestEnum::Two { a: 1, b: 2 }.serialize_to_vec().unwrap(),
vec![2u8, 0u8, 1u8, 0u8, 0u8, 0u8, 2u8]
);
}
#[test]
fn deserialize_enum() {
assert_eq!(
TestEnum::deserialize_from_slice(&[0u8]).unwrap(),
TestEnum::Zero
);
assert_eq!(
TestEnum::deserialize_from_slice(&[1u8, 0u8, 0u8, 0u8, 42u8]).unwrap(),
TestEnum::One(42)
);
assert_eq!(
TestEnum::deserialize_from_slice(&[2u8, 0u8, 1u8, 0u8, 0u8, 0u8, 2u8]).unwrap(),
TestEnum::Two { a: 1, b: 2 }
);
assert!(TestEnum::deserialize_from_slice(&[255u8]).is_err());
}
}

2
androscalpel_serializer_derive/.gitignore vendored Normal file

@@ -0,0 +1,2 @@
/target
/Cargo.lock

15
androscalpel_serializer_derive/Cargo.toml Normal file

@@ -0,0 +1,15 @@
[package]
name = "androscalpel_serializer_derive"
version = "0.1.0"
edition = "2021"
[lib]
proc-macro = true
[dependencies]
proc-macro2 = "1"
quote = "1"
syn = { version = "2", features = ["parsing"] }
[dev-dependencies]
androscalpel_serializer = { path = "../androscalpel_serializer" }

518
androscalpel_serializer_derive/src/lib.rs Normal file

@@ -0,0 +1,518 @@
use proc_macro2::TokenStream;
use quote::{format_ident, quote, quote_spanned};
use syn::parse::{Parse, ParseStream};
use syn::spanned::Spanned;
use syn::{
parse_macro_input, Attribute, Data, DeriveInput, Field, Fields, Ident, Index, Meta, MetaList,
Token, Type, Variant,
};
/// Derive the `Serializable` trait for a type.
///
/// In the simple case, the implementation is just the concatenation of the `Serializable` fields.
///
/// ## Until
///
/// For lists of data, we can serialize elements until a specific terminator is found. To do so we need
/// the type of the repeated data `D`, the type of the symbol that ends the list `U`, and
/// the specific value of that symbol, `end`.
///
/// For example, we can deserialize a `Vec<u8>` until we find 0x0000:
///
/// `[0, 1, 2, 3, 0, 0]` would be deserialized as `vec![0, 1, 2, 3]`
///
/// In this example, `D` is `u8`, `U` is `u16` and `end` is `0x0000`.
///
/// To define a field using this method, the type of the field must implement the trait
/// `SerializableUntil<D, U>`, `D` and `U` must implement `Serializable`, and the field
/// must be marked with the attribute `#[until(D, U, end)]`:
///
/// ```
/// pub use androscalpel_serializer::*;
///
/// #[derive(Serializable, PartialEq, Debug)]
/// struct Example {
/// #[until(u8, u16, 0)]
/// a: Vec<u8>,
/// }
///
/// assert_eq!(
/// Example::deserialize_from_slice(&[0, 1, 2, 3, 0, 0]).unwrap(),
/// Example { a: vec![0, 1, 2, 3] }
/// );
/// ```
///
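/// ## Enums
///
/// Enums can also be derived. The enum itself must carry a `#[prefix_type(T)]` attribute giving
/// the `Serializable` type of the discriminant, and every variant must carry a `#[prefix(val)]`
/// attribute giving the value written before the variant's fields. The `Message` enum below is
/// an illustrative sketch mirroring the `TestEnum` used in the crate's tests:
///
/// ```
/// pub use androscalpel_serializer::*;
///
/// #[derive(Serializable, PartialEq, Debug)]
/// #[prefix_type(u8)]
/// enum Message {
///     #[prefix(0)]
///     Ping,
///     #[prefix(1)]
///     Value(u32),
/// }
///
/// assert_eq!(Message::Ping.serialize_to_vec().unwrap(), vec![0u8]);
/// assert_eq!(
///     Message::deserialize_from_slice(&[1, 0, 0, 0, 42]).unwrap(),
///     Message::Value(42)
/// );
/// ```
///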
#[proc_macro_derive(Serializable, attributes(until, prefix, prefix_type))]
pub fn derive_serializable(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input = parse_macro_input!(input as DeriveInput);
let name = input.ident;
let params = ParamsStruct::parse(&input.attrs);
let implem_serialize = get_implem_serialize(&input.data, &params);
let implem_deserialize = get_implem_deserialize(&input.data, &params);
let implem_size = get_implem_size(&input.data, &params);
let expanded = quote! {
impl androscalpel_serializer::Serializable for #name {
fn serialize(&self, output: &mut dyn std::io::Write) -> androscalpel_serializer::Result<()> {
#implem_serialize
}
fn deserialize(input: &mut dyn androscalpel_serializer::ReadSeek) -> androscalpel_serializer::Result<Self> {
#implem_deserialize
}
fn size(&self) -> usize {
#implem_size
}
}
};
proc_macro::TokenStream::from(expanded)
}
/// Parsed Parameters for the `#[until(D, U, end)]` attribute.
/// `D` is the type of the repeated data, `U` is the type of `end`, and
/// `end` is the value that marks the end of the repeated `D`s.
struct UntilParams(Ident, Ident, TokenStream);
impl Parse for UntilParams {
fn parse(input: ParseStream) -> syn::Result<Self> {
let data_type = input.parse()?;
input.parse::<Token![,]>()?;
let end_type = input.parse()?;
input.parse::<Token![,]>()?;
let until = input.parse()?;
Ok(Self(data_type, end_type, until))
}
}
/// Parsed Parameters for the `#[prefix(val)]` attribute.
struct PrefixParams(TokenStream);
impl Parse for PrefixParams {
fn parse(input: ParseStream) -> syn::Result<Self> {
let value = input.parse()?;
Ok(Self(value))
}
}
/// Parsed Parameters for the `#[prefix_type(T)]` attribute.
struct PrefixTypeParams(TokenStream);
impl Parse for PrefixTypeParams {
fn parse(input: ParseStream) -> syn::Result<Self> {
let data_type = input.parse()?;
Ok(Self(data_type))
}
}
/// All the attribute parameters for a struct/enum
#[derive(Default)]
struct ParamsStruct {
pub prefix_type: Option<PrefixTypeParams>,
}
/// All the attribute parameters for a field
#[derive(Default)]
struct ParamsField {
pub until: Option<UntilParams>,
}
/// All the attribute parameters for a variant
#[derive(Default)]
struct ParamsVariant {
pub prefix: Option<PrefixParams>,
}
impl ParamsStruct {
fn parse(attrs: &[Attribute]) -> Self {
let mut params = ParamsStruct::default();
for attr in attrs {
match &attr.meta {
Meta::List(MetaList { path, .. }) if path.is_ident("until") => {
panic!("Structur/Enum cannot take the attribut 'until'")
}
Meta::List(MetaList { path, .. }) if path.is_ident("prefix") => {
panic!("Structur/Enum cannot take the attribut 'prefix'")
}
Meta::List(MetaList { path, tokens, .. }) if path.is_ident("prefix_type") => {
params.prefix_type = Some(syn::parse2(tokens.clone()).unwrap());
}
_ => unimplemented!(),
}
}
params
}
}
impl ParamsField {
fn parse(attrs: &[Attribute]) -> Self {
let mut params = ParamsField::default();
for attr in attrs {
match &attr.meta {
Meta::List(MetaList { path, tokens, .. }) if path.is_ident("until") => {
params.until = Some(syn::parse2(tokens.clone()).unwrap())
}
Meta::List(MetaList { path, .. }) if path.is_ident("prefix") => {
panic!("Fields cannot take the attribut 'prefix'")
}
Meta::List(MetaList { path, .. }) if path.is_ident("prefix_type") => {
panic!("Fields cannot take the attribut 'prefix_type'")
}
_ => unimplemented!(),
}
}
params
}
}
impl ParamsVariant {
fn parse(attrs: &[Attribute]) -> Self {
let mut params = ParamsVariant::default();
for attr in attrs {
match &attr.meta {
Meta::List(MetaList { path, .. }) if path.is_ident("until") => {
panic!("Variant cannot take the attribut 'until'")
}
Meta::List(MetaList { path, tokens, .. }) if path.is_ident("prefix") => {
params.prefix = Some(syn::parse2(tokens.clone()).unwrap());
}
Meta::List(MetaList { path, .. }) if path.is_ident("prefix_type") => {
panic!("Variant cannot take the attribut 'prefix_type'")
}
_ => unimplemented!(),
}
}
params
}
}
/// Return the destructuring pattern for an enum variant, e.g. `{ a, b }` for named fields,
/// `(field0, field1)` for unnamed fields, and nothing for a unit variant.
fn get_enum_match(variant: &Variant) -> TokenStream {
match variant.fields {
Fields::Named(ref fields) => {
let recurse = fields.named.iter().map(|f| {
let name = &f.ident;
quote! { #name }
});
quote_spanned! { variant.span() =>
{#(#recurse,)*}
}
}
Fields::Unnamed(ref fields) => {
let names: Vec<_> = fields
.unnamed
.iter()
.enumerate()
.map(|(i, _)| format_ident!("field{}", i))
.collect();
quote_spanned! { variant.span() =>
(#(#names,)*)
}
}
Fields::Unit => quote! {},
}
}
/// Return the code implementing [`androscalpel_serializer::Serializable::size`]
/// for a single field `f`, accessed through `field_ref`.
fn get_implem_size_for_field(f: &Field, field_ref: TokenStream) -> TokenStream {
let params = ParamsField::parse(&f.attrs);
match (&f.ty, params) {
(
_,
ParamsField {
until: Some(UntilParams(d, u, until)),
},
) => quote_spanned! { f.span() =>
androscalpel_serializer::SerializableUntil::<#d, #u>::size(&#field_ref, #until)
},
(Type::Array(_), ParamsField { until: None }) => quote_spanned! { f.span() =>
#field_ref.iter().map(androscalpel_serializer::Serializable::size).sum::<usize>()
},
(_, ParamsField { until: None }) => quote_spanned! { f.span() =>
androscalpel_serializer::Serializable::size(&#field_ref)
},
}
}
/// Return the implementation of [`androscalpel_serializer::Serializable::size`].
fn get_implem_size(data: &Data, params: &ParamsStruct) -> TokenStream {
match *data {
Data::Struct(ref data) => match data.fields {
Fields::Named(ref fields) => {
let recurse = fields.named.iter().map(|f| {
let name = &f.ident;
get_implem_size_for_field(f, quote! { self.#name })
});
quote! {
0 #(+ #recurse)*
}
}
Fields::Unnamed(ref fields) => {
let recurse = fields.unnamed.iter().enumerate().map(|(i, f)| {
let index = Index::from(i);
get_implem_size_for_field(f, quote! { self.#index })
});
quote! {
0 #(+ #recurse)*
}
}
Fields::Unit => quote!(0),
},
Data::Enum(ref data) => {
let PrefixTypeParams(prefix_ty) = params.prefix_type.as_ref().expect(
"Cannot derive Serializable for an enum without the #[prefix_type(T)] attribute",
);
let recurse = data.variants.iter().map(|v| {
let ident = &v.ident;
let params = ParamsVariant::parse(&v.attrs);
let prefix = params.prefix.expect(
"Cannot derive Serializable for variant without the #[prefix(val)] attribute",
);
let PrefixParams(val) = prefix;
let match_ = get_enum_match(v);
let body = match v.fields {
Fields::Named(ref fields) => {
let recurse = fields.named.iter().map(|f| {
let name = &f.ident;
get_implem_size_for_field(f, quote! { *#name })
});
quote_spanned! { v.span() =>
<#prefix_ty as androscalpel_serializer::Serializable>::size(&#val) #(+#recurse )*
}
}
Fields::Unnamed(ref fields) => {
let recurse = fields.unnamed.iter().enumerate().map(|(i, f)| {
let ident = format_ident!("field{}", i);
get_implem_size_for_field(f, quote! { *#ident })
});
quote_spanned! { v.span() =>
<#prefix_ty as androscalpel_serializer::Serializable>::size(&#val) #(+#recurse )*
}
}
Fields::Unit => quote! { <#prefix_ty as androscalpel_serializer::Serializable>::size(&#val) },
};
quote_spanned! { v.span() =>
Self::#ident #match_ => { #body },
}
});
quote! { match self {
#(#recurse)*
}}
}
Data::Union(_) => unimplemented!(),
}
}
/// Return the code implementing [`androscalpel_serializer::Serializable::serialize`]
/// for a single field `f`, accessed through `field_ref`.
fn get_implem_serialize_for_field(f: &Field, field_ref: TokenStream) -> TokenStream {
let params = ParamsField::parse(&f.attrs);
// TODO: Improve error handling
match (&f.ty, params) {
(
_,
ParamsField {
until: Some(UntilParams(d, u, until)),
},
) => quote_spanned! { f.span() =>
androscalpel_serializer::SerializableUntil::<#d, #u>::serialize(&#field_ref, output, #until)?;
},
(Type::Array(_), ParamsField { until: None }) => quote_spanned! { f.span() =>
for x in #field_ref {
androscalpel_serializer::Serializable::serialize(&x, output)?;
}
},
(_, ParamsField { until: None }) => quote_spanned! { f.span() =>
androscalpel_serializer::Serializable::serialize(&#field_ref, output)?;
},
}
}
/// Return the implementation of [`androscalpel_serializer::Serializable::serialize`].
fn get_implem_serialize(data: &Data, params: &ParamsStruct) -> TokenStream {
match *data {
Data::Struct(ref data) => match data.fields {
Fields::Named(ref fields) => {
let recurse = fields.named.iter().map(|f| {
let name = &f.ident;
get_implem_serialize_for_field(f, quote! { self.#name })
});
quote! {
#(#recurse)*
Ok(())
}
}
Fields::Unnamed(ref fields) => {
let recurse = fields.unnamed.iter().enumerate().map(|(i, f)| {
let index = Index::from(i);
get_implem_serialize_for_field(f, quote! { self.#index })
});
quote! {
#(#recurse)*
Ok(())
}
}
Fields::Unit => quote!(Ok(())),
},
Data::Enum(ref data) => {
let PrefixTypeParams(prefix_ty) = params.prefix_type.as_ref().expect(
"Cannot derive Serializable for an enum without the #[prefix_type(T)] attribute",
);
let recurse = data.variants.iter().map(|v| {
let ident = &v.ident;
let params = ParamsVariant::parse(&v.attrs);
let prefix = params.prefix.expect(
"Cannot derive Serializable for variant without the #[prefix(val)] attribute",
);
let PrefixParams(val) = prefix;
let match_ = get_enum_match(v);
let body = match v.fields {
Fields::Named(ref fields) => {
let recurse = fields.named.iter().map(|f| {
let name = &f.ident;
get_implem_serialize_for_field(f, quote! { *#name })
});
quote_spanned! { v.span() =>
<#prefix_ty as androscalpel_serializer::Serializable>::serialize(&#val, output)?;
#(#recurse)*
}
}
Fields::Unnamed(ref fields) => {
let recurse = fields.unnamed.iter().enumerate().map(|(i, f)| {
let ident = format_ident!("field{}", i);
get_implem_serialize_for_field(f, quote! { *#ident })
});
quote_spanned! { v.span() =>
<#prefix_ty as androscalpel_serializer::Serializable>::serialize(&#val, output)?;
#(#recurse)*
}
}
Fields::Unit => quote! {
<#prefix_ty as androscalpel_serializer::Serializable>::serialize(&#val, output)?;
},
};
quote_spanned! { v.span() =>
Self::#ident #match_ => { #body },
}
});
quote! {
match self {
#(#recurse)*
}
Ok(())
}
}
Data::Union(_) => unimplemented!(),
}
}
/// Return the code implementing [`androscalpel_serializer::Serializable::deserialize`]
/// for a single field `f`, accessed through `field_ref`.
fn get_implem_deserialize_for_field(f: &Field, field_ref: TokenStream) -> TokenStream {
let params = ParamsField::parse(&f.attrs);
let ty = &f.ty;
// TODO: Improve error handling
match (ty, params) {
(
_,
ParamsField {
until: Some(UntilParams(d, u, until)),
},
) => quote_spanned! { f.span() =>
#field_ref <#ty as androscalpel_serializer::SerializableUntil::<#d, #u>>::deserialize(input, #until)?,
},
(Type::Array(arr), ParamsField { until: None }) => {
let len = &arr.len;
let arr_ty = &arr.elem;
quote_spanned! { f.span() =>
#field_ref {
let mut vec_ = vec![];
for _ in 0..(#len) {
vec_.push(<#arr_ty as androscalpel_serializer::Serializable>::deserialize(input)?);
}
vec_.try_into().unwrap()
},
}
}
(_, ParamsField { until: None }) => quote_spanned! { f.span() =>
#field_ref <#ty as androscalpel_serializer::Serializable>::deserialize(input)?,
},
}
}
/// Return the implementation of [`androscalpel_serializer::Serializable::deserialize`].
fn get_implem_deserialize(data: &Data, params: &ParamsStruct) -> TokenStream {
match *data {
Data::Struct(ref data) => match data.fields {
Fields::Named(ref fields) => {
let recurse = fields.named.iter().map(|f| {
let name = &f.ident;
get_implem_deserialize_for_field(f, quote! { #name: })
});
quote! {
Ok(
Self {
#(#recurse)*
}
)
}
}
Fields::Unnamed(ref fields) => {
let recurse = fields
.unnamed
.iter()
.map(|f| get_implem_deserialize_for_field(f, quote! {}));
quote! {
Ok(Self(#(#recurse)*))
}
}
Fields::Unit => quote!(Ok(Self)),
},
Data::Enum(ref data) => {
let PrefixTypeParams(prefix_ty) = params.prefix_type.as_ref().expect(
"Cannot derive Serializable for an enum without the #[prefix_type(T)] attribute",
);
let recurse = data.variants.iter().map(|v| {
let v_ident = &v.ident;
let v_params = ParamsVariant::parse(&v.attrs);
let PrefixParams(val) = v_params.prefix.expect(
"Cannot derive Serializable for variant without the #[prefix(val)] attribute",
);
match v.fields {
Fields::Named(ref fields) => {
let recurse = fields.named.iter().map(|f| {
let name = &f.ident;
get_implem_deserialize_for_field(f, quote! { #name: })
});
quote_spanned! { v.span() =>
if #val == prefix {
return Ok(Self::#v_ident {#(#recurse)*});
}
}
}
Fields::Unnamed(ref fields) => {
let recurse = fields
.unnamed
.iter()
.map(|f| get_implem_deserialize_for_field(f, quote! {}));
quote_spanned! { v.span() =>
if #val == prefix {
return Ok(Self::#v_ident (#(#recurse)*));
}
}
}
Fields::Unit => quote_spanned! { v.span() =>
if #val == prefix {
return Ok(Self::#v_ident);
}
},
}
});
quote! {
let prefix = <#prefix_ty as androscalpel_serializer::Serializable>::deserialize(input)?;
#(#recurse)*
Err(androscalpel_serializer::Error::SerializationError(format!(
"Found prefix {:?} that did not match any variant.", prefix
)))
}
}
Data::Union(_) => unimplemented!(),
}
}