4 changes: 4 additions & 0 deletions Cargo.toml
@@ -79,6 +79,10 @@ required-features = ["bits"]
name = "ipv4"
required-features = ["bits"]

[[example]]
name = "80211"
required-features = ["bits"]

[[example]]
name = "many"

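The new 80211 example itself is not part of this diff, so its contents are not visible here. Below is a minimal sketch of the kind of struct such an example could exercise, assuming the bit_order attribute added by this PR accepts the string values "lsb" and "msb" (the field names and sample layout are illustrative, not taken from the actual example):

use deku::prelude::*;

// Illustrative only: an 802.11-style frame-control header whose sub-byte
// fields are packed least-significant-bit first.
#[derive(Debug, PartialEq, DekuRead, DekuWrite)]
#[deku(bit_order = "lsb")]
struct FrameControl {
    #[deku(bits = 2)]
    protocol_version: u8,
    #[deku(bits = 2)]
    frame_type: u8,
    #[deku(bits = 4)]
    sub_type: u8,
    flags: u8,
}

fn main() -> Result<(), DekuError> {
    let fc = FrameControl { protocol_version: 0, frame_type: 2, sub_type: 8, flags: 0x41 };
    let bytes = fc.to_bytes()?;
    // Parsing the serialized bytes back should reproduce the original value.
    let ((_rest, _offset), parsed) = FrameControl::from_bytes((&bytes, 0))?;
    assert_eq!(fc, parsed);
    Ok(())
}

The Cargo.toml entry above gates the example behind the bits feature, which the bits = N field attributes require.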
21 changes: 19 additions & 2 deletions deku-derive/src/lib.rs
@@ -163,6 +163,9 @@ struct DekuData {

/// struct only: seek from start position
seek_from_start: Option<TokenStream>,

/// Bit Order for all fields
bit_order: Option<syn::LitStr>,
}

impl DekuData {
@@ -217,6 +220,7 @@ impl DekuData {
seek_from_current: receiver.seek_from_current?,
seek_from_end: receiver.seek_from_end?,
seek_from_start: receiver.seek_from_start?,
bit_order: receiver.bit_order,
};

DekuData::validate(&data)?;
@@ -364,6 +368,7 @@ impl<'a> TryFrom<&'a DekuData> for DekuDataEnum<'a> {
#[cfg(not(feature = "bits"))]
None,
deku_data.bytes.as_ref(),
deku_data.bit_order.as_ref(),
)?;

Ok(Self {
@@ -485,10 +490,10 @@ struct FieldData {
/// condition to parse field
cond: Option<TokenStream>,

// assertion on field
/// assertion on field
assert: Option<TokenStream>,

// assert value of field
/// assert value of field
assert_eq: Option<TokenStream>,

/// seek from current position
@@ -502,6 +507,9 @@

/// seek from start position
seek_from_start: Option<TokenStream>,

/// Bit Order of field
bit_order: Option<syn::LitStr>,
}

impl FieldData {
@@ -547,6 +555,7 @@ impl FieldData {
seek_from_current: receiver.seek_from_current?,
seek_from_end: receiver.seek_from_end?,
seek_from_start: receiver.seek_from_start?,
bit_order: receiver.bit_order,
};

FieldData::validate(&data)?;
@@ -780,6 +789,10 @@ struct DekuReceiver {
/// struct only: seek from start position
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
seek_from_start: Result<Option<TokenStream>, ReplacementError>,

/// Bit Order of field
#[darling(default)]
bit_order: Option<syn::LitStr>,
}

type ReplacementError = TokenStream;
@@ -980,6 +993,10 @@ struct DekuFieldReceiver {
/// seek from start position
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
seek_from_start: Result<Option<TokenStream>, ReplacementError>,

/// Bit Order of field
#[darling(default)]
bit_order: Option<syn::LitStr>,
}

/// Receiver for the variant-level attributes inside an enum
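With these changes, both the container-level receivers (DekuData/DekuReceiver) and the field-level ones (FieldData/DekuFieldReceiver) carry the attribute as an Option<syn::LitStr>, left as None when the user does not set it. The read and write emitters below resolve the effective order with the field value taking precedence over the container value; a standalone sketch of that resolution (the helper name is illustrative, the PR simply inlines the .or() call):

use syn::LitStr;

// Illustrative helper: a field-level `bit_order` wins over the container-level
// one; when both are None, the emitters fall back to Order::default().
fn effective_bit_order<'a>(
    field: Option<&'a LitStr>,
    container: Option<&'a LitStr>,
) -> Option<&'a LitStr> {
    field.or(container)
}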
86 changes: 61 additions & 25 deletions deku-derive/src/macros/deku_read.rs
@@ -4,14 +4,15 @@ use darling::ast::{Data, Fields};
use darling::ToTokens;
use proc_macro2::TokenStream;
use quote::quote;
use syn::LitStr;

use crate::macros::{
gen_ctx_types_and_arg, gen_field_args, gen_internal_field_ident, gen_internal_field_idents,
gen_type_from_ctx_id, token_contains_string, wrap_default_ctx,
assertion_failed, gen_bit_order_from_str, gen_ctx_types_and_arg, gen_field_args,
gen_internal_field_idents, token_contains_string, wrap_default_ctx,
};
use crate::{DekuData, DekuDataEnum, DekuDataStruct, FieldData, Id};

use super::assertion_failed;
use super::{gen_internal_field_ident, gen_type_from_ctx_id};

pub(crate) fn emit_deku_read(input: &DekuData) -> Result<TokenStream, syn::Error> {
match &input.data {
@@ -575,28 +576,59 @@ fn emit_bit_byte_offsets(
}

#[cfg(feature = "bits")]
fn emit_padding(bit_size: &TokenStream) -> TokenStream {
fn emit_padding(bit_size: &TokenStream, bit_order: Option<&LitStr>) -> TokenStream {
let crate_ = super::get_crate_name();
quote! {
{
use core::convert::TryFrom;
// TODO: I hope this consts in most cases?
extern crate alloc;
use alloc::borrow::Cow;
let __deku_pad = usize::try_from(#bit_size).map_err(|e|
::#crate_::DekuError::InvalidParam(Cow::from(format!(
"Invalid padding param \"({})\": cannot convert to usize",
stringify!(#bit_size)
)))
)?;


if (__deku_pad % 8) == 0 {
let bytes_read = __deku_pad / 8;
let mut buf = alloc::vec![0; bytes_read];
let _ = __deku_reader.read_bytes(bytes_read, &mut buf)?;
} else {
let _ = __deku_reader.read_bits(__deku_pad)?;
if let Some(bit_order) = bit_order {
let order = gen_bit_order_from_str(bit_order).unwrap();
quote! {
{
use core::convert::TryFrom;
// TODO: I hope this consts in most cases?
extern crate alloc;
use alloc::borrow::Cow;
let __deku_pad = usize::try_from(#bit_size).map_err(|e|
::#crate_::DekuError::InvalidParam(Cow::from(format!(
"Invalid padding param \"({})\": cannot convert to usize",
stringify!(#bit_size)
)))
)?;


if (__deku_pad % 8) == 0 {
let bytes_read = __deku_pad / 8;
let mut buf = vec![0; bytes_read];
// TODO: use skip_bytes, or Seek in the future?
[Review comment] @sharksforarms (Owner), Nov 2, 2024:
would it be beneficial to address this TODO now or keep it a TODO?
edit: oops old review comment, I'm ok with leaving this as TODO and addressing later.
let _ = __deku_reader.read_bytes(bytes_read, &mut buf, #order)?;
} else {
// TODO: use skip_bits, or Seek in the future?
let _ = __deku_reader.read_bits(__deku_pad, #order)?;
}
}
}
} else {
quote! {
{
use core::convert::TryFrom;
// TODO: I hope this consts in most cases?
extern crate alloc;
use alloc::borrow::Cow;
let __deku_pad = usize::try_from(#bit_size).map_err(|e|
::#crate_::DekuError::InvalidParam(Cow::from(format!(
"Invalid padding param \"({})\": cannot convert to usize",
stringify!(#bit_size)
)))
)?;


if (__deku_pad % 8) == 0 {
let bytes_read = __deku_pad / 8;
let mut buf = vec![0; bytes_read];
// TODO: use skip_bytes, or Seek in the future?
let _ = __deku_reader.read_bytes(bytes_read, &mut buf, ::#crate_::ctx::Order::default())?;
} else {
// TODO: use skip_bits, or Seek in the future?
let _ = __deku_reader.read_bits(__deku_pad, ::#crate_::ctx::Order::default())?;
}
}
}
}
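gen_bit_order_from_str is imported at the top of this file but defined elsewhere in the PR, so its body is not visible here. A plausible sketch of such a helper, assuming it maps the "lsb"/"msb" literals to deku's ctx::Order variants and turns any other value into a compile error (the variant names, error wording, and use of ::deku rather than the resolved crate name are assumptions):

use proc_macro2::TokenStream;
use quote::quote;
use syn::LitStr;

// Hypothetical shape of the helper used by emit_padding above: turn the
// attribute string into tokens naming a ::deku::ctx::Order variant.
fn gen_bit_order_from_str(lit: &LitStr) -> Result<TokenStream, syn::Error> {
    match lit.value().as_str() {
        "lsb" => Ok(quote! { ::deku::ctx::Order::Lsb0 }),
        "msb" => Ok(quote! { ::deku::ctx::Order::Msb0 }),
        _ => Err(syn::Error::new(
            lit.span(),
            "bit_order must be \"lsb\" or \"msb\"",
        )),
    }
}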
@@ -620,7 +652,7 @@ fn emit_padding_bytes(bit_size: &TokenStream) -> TokenStream {


let mut buf = alloc::vec![0; __deku_pad];
let _ = __deku_reader.read_bytes(__deku_pad, &mut buf)?;
let _ = __deku_reader.read_bytes(__deku_pad, &mut buf, ::#crate_::ctx::Order::default())?;
}
}
}
@@ -636,6 +668,7 @@ fn emit_field_read(
let field_type = &f.ty;

let field_endian = f.endian.as_ref().or(input.endian.as_ref());
let field_bit_order = f.bit_order.as_ref().or(input.bit_order.as_ref());

let field_reader = &f.reader;

@@ -752,6 +785,7 @@ fn emit_field_read(
None,
f.bytes.as_ref(),
f.ctx.as_ref(),
field_bit_order,
)?;

// The __deku_reader limiting options are special, we need to generate `(limit, (other, ..))` for them.
@@ -891,12 +925,14 @@ fn emit_field_read(
let pad_bits_before = crate::macros::pad_bits(
f.pad_bits_before.as_ref(),
f.pad_bytes_before.as_ref(),
field_bit_order,
emit_padding,
);
#[cfg(feature = "bits")]
let pad_bits_after = crate::macros::pad_bits(
f.pad_bits_after.as_ref(),
f.pad_bytes_after.as_ref(),
field_bit_order,
emit_padding,
);

53 changes: 38 additions & 15 deletions deku-derive/src/macros/deku_write.rs
@@ -3,10 +3,11 @@ use std::convert::TryFrom;
use darling::ast::{Data, Fields};
use proc_macro2::TokenStream;
use quote::quote;
use syn::LitStr;

use crate::macros::{
assertion_failed, gen_ctx_types_and_arg, gen_field_args, gen_struct_destruction,
token_contains_string, wrap_default_ctx,
assertion_failed, gen_bit_order_from_str, gen_ctx_types_and_arg, gen_field_args,
gen_struct_destruction, token_contains_string, wrap_default_ctx,
};
use crate::{DekuData, DekuDataEnum, DekuDataStruct, FieldData, Id};

@@ -484,20 +485,38 @@ fn emit_bit_byte_offsets(
}

#[cfg(feature = "bits")]
fn emit_padding(bit_size: &TokenStream) -> TokenStream {
fn emit_padding(bit_size: &TokenStream, bit_order: Option<&LitStr>) -> TokenStream {
let crate_ = super::get_crate_name();
quote! {
{
use core::convert::TryFrom;
extern crate alloc;
use alloc::borrow::Cow;
let __deku_pad = usize::try_from(#bit_size).map_err(|e|
::#crate_::DekuError::InvalidParam(Cow::from(format!(
"Invalid padding param \"({})\": cannot convert to usize",
stringify!(#bit_size)
)))
)?;
__deku_writer.write_bits(::#crate_::bitvec::bitvec![u8, ::#crate_::bitvec::Msb0; 0; __deku_pad].as_bitslice())?;
if let Some(bit_order) = bit_order {
let order = gen_bit_order_from_str(bit_order).unwrap();
quote! {
{
use core::convert::TryFrom;
extern crate alloc;
use alloc::borrow::Cow;
let __deku_pad = usize::try_from(#bit_size).map_err(|e|
::#crate_::DekuError::InvalidParam(Cow::from(format!(
"Invalid padding param \"({})\": cannot convert to usize",
stringify!(#bit_size)
)))
)?;
__deku_writer.write_bits_order(::#crate_::bitvec::bitvec![u8, ::#crate_::bitvec::Msb0; 0; __deku_pad].as_bitslice(), #order)?;
}
}
} else {
quote! {
{
use core::convert::TryFrom;
extern crate alloc;
use alloc::borrow::Cow;
let __deku_pad = usize::try_from(#bit_size).map_err(|e|
::#crate_::DekuError::InvalidParam(Cow::from(format!(
"Invalid padding param \"({})\": cannot convert to usize",
stringify!(#bit_size)
)))
)?;
__deku_writer.write_bits(::#crate_::bitvec::bitvec![u8, ::#crate_::bitvec::Msb0; 0; __deku_pad].as_bitslice())?;
}
}
}
}
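These write-side padding changes mirror the read side: with a bit_order attribute in scope, the zero padding is emitted through write_bits_order, otherwise the pre-existing write_bits path is kept. A hedged usage sketch of the attribute combination this enables (pad_bits_before and bits are pre-existing deku attributes, bit_order is the one added by this PR, and the exact wire layout should be checked against the crate's documentation):

use deku::prelude::*;

// Illustrative: three padding bits are skipped on read and written as zeros on
// write, least-significant-bit first, followed by a 5-bit value, one byte in total.
#[derive(Debug, PartialEq, DekuRead, DekuWrite)]
#[deku(bit_order = "lsb")]
struct Padded {
    #[deku(pad_bits_before = "3", bits = 5)]
    value: u8,
}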
@@ -531,6 +550,7 @@ fn emit_field_write(
) -> Result<TokenStream, syn::Error> {
let crate_ = super::get_crate_name();
let field_endian = f.endian.as_ref().or(input.endian.as_ref());
let field_bit_order = f.bit_order.as_ref().or(input.bit_order.as_ref());

let seek = if let Some(num) = &f.seek_from_current {
quote! {
@@ -632,6 +652,7 @@ fn emit_field_write(
None,
f.bytes.as_ref(),
f.ctx.as_ref(),
field_bit_order,
)?;

if f.temp {
@@ -653,12 +674,14 @@
let pad_bits_before = crate::macros::pad_bits(
f.pad_bits_before.as_ref(),
f.pad_bytes_before.as_ref(),
field_bit_order,
emit_padding,
);
#[cfg(feature = "bits")]
let pad_bits_after = crate::macros::pad_bits(
f.pad_bits_after.as_ref(),
f.pad_bytes_after.as_ref(),
field_bit_order,
emit_padding,
);
