Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
93 changes: 79 additions & 14 deletions font-codegen/src/fields.rs
Original file line number Diff line number Diff line change
Expand Up @@ -354,11 +354,21 @@ pub(crate) struct FieldConstructorInfo {
pub(crate) manual_compile_type: bool,
}

fn big_endian(typ: &syn::Ident) -> TokenStream {
/// Tokens for the raw-bytes wrapper type used to store a scalar of type `typ`.
///
/// A single `u8` has no byte order, so it is stored bare; every other scalar
/// is wrapped in `LittleEndian<T>` or `BigEndian<T>` depending on
/// `is_little_endian`.
fn endian_wrapper(typ: &syn::Ident, is_little_endian: bool) -> TokenStream {
    if typ == "u8" {
        // one-byte values need no endian wrapper
        quote!(#typ)
    } else {
        let wrapper_ty = endian_wrapper_ty(is_little_endian);
        quote!(#wrapper_ty<#typ>)
    }
}

fn endian_wrapper_ty(is_little_endian: bool) -> TokenStream {
if is_little_endian {
quote!(LittleEndian)
} else {
quote!(BigEndian)
}
quote!(BigEndian<#typ>)
}

fn traversal_arm_for_field(
Expand Down Expand Up @@ -517,9 +527,12 @@ impl Field {
pub(crate) fn type_for_record(&self) -> TokenStream {
match &self.typ {
FieldType::Offset { typ, .. } if self.is_nullable() => {
quote!(BigEndian<Nullable<#typ>>)
let endian_ty = endian_wrapper_ty(self.is_little_endian());
quote! { #endian_ty<Nullable<#typ>> }
}
FieldType::Offset { typ, .. } | FieldType::Scalar { typ } => {
endian_wrapper(typ, self.is_little_endian())
}
FieldType::Offset { typ, .. } | FieldType::Scalar { typ } => big_endian(typ),
FieldType::Struct { typ } => typ.to_token_stream(),
FieldType::ComputedArray(array) => {
let inner = array.type_with_lifetime();
Expand All @@ -528,10 +541,11 @@ impl Field {
FieldType::VarLenArray(_) => quote!(compile_error("VarLenArray not used in records?")),
FieldType::Array { inner_typ } => match inner_typ.as_ref() {
FieldType::Offset { typ, .. } if self.is_nullable() => {
quote!(&'a [BigEndian<Nullable<#typ>>])
let endian_ty = endian_wrapper_ty(self.is_little_endian());
quote!(&'a [#endian_ty<Nullable<#typ>>])
}
FieldType::Offset { typ, .. } | FieldType::Scalar { typ } => {
let be = big_endian(typ);
let be = endian_wrapper(typ, self.is_little_endian());
quote!(&'a [#be])
}
FieldType::Struct { typ } => quote!( &'a [#typ] ),
Expand Down Expand Up @@ -605,6 +619,34 @@ impl Field {
self.attrs.conditional.is_some()
}

/// Whether this field carries the `little_endian` attribute.
///
/// Controls which endian wrapper type and which read methods are generated
/// for the field (see `endian_wrapper` / `cursor_read_method` and friends).
fn is_little_endian(&self) -> bool {
    self.attrs.little_endian.is_some()
}

/// Name of the cursor method used to read this field's scalar value,
/// chosen by the field's declared endianness.
fn cursor_read_method(&self) -> TokenStream {
    match self.is_little_endian() {
        true => quote! { read_scalar_le },
        false => quote! { read },
    }
}

/// Name of the cursor method that reads an endian-wrapped value for this
/// field (`read_le` vs `read_be`), chosen by the field's declared endianness.
fn cursor_read_endian_method(&self) -> TokenStream {
    match self.is_little_endian() {
        true => quote! { read_le },
        false => quote! { read_be },
    }
}

/// Name of the `FontData` method used to read this field at a byte offset,
/// chosen by the field's declared endianness.
fn data_read_at_method(&self) -> TokenStream {
    match self.is_little_endian() {
        true => quote! { read_scalar_le_at },
        false => quote! { read_at },
    }
}

/// Sanity check we are in a sane state for the end of phase
fn sanity_check(&self, phase: Phase) -> syn::Result<()> {
check_resolution(phase, &self.typ)?;
Expand Down Expand Up @@ -754,10 +796,11 @@ impl Field {
| FieldType::Struct { typ } => typ.to_token_stream(),
FieldType::Array { inner_typ } => match inner_typ.as_ref() {
FieldType::Offset { typ, .. } if self.is_nullable() => {
quote!(&'a [BigEndian<Nullable<#typ>>])
let endian_ty = endian_wrapper_ty(self.is_little_endian());
quote!(&'a [#endian_ty<Nullable<#typ>>])
}
FieldType::Offset { typ, .. } | FieldType::Scalar { typ } => {
let be = big_endian(typ);
let be = endian_wrapper(typ, self.is_little_endian());
quote!(&'a [#be])
}
FieldType::Struct { typ } => quote!(&'a [#typ]),
Expand Down Expand Up @@ -820,7 +863,8 @@ impl Field {
} else if is_array {
quote!(self.data.read_array(range).unwrap())
} else {
quote!(self.data.read_at(range.start).unwrap())
let read_at = self.data_read_at_method();
quote!(self.data.#read_at(range.start).unwrap())
};
if is_versioned {
read_stmt = quote!(Some(#read_stmt));
Expand Down Expand Up @@ -956,6 +1000,20 @@ impl Field {
} else {
quote!(ArrayOfOffsets)
};
let wrapper_type = if offset_type == "u8" {
if self.is_nullable() {
quote!(Nullable<u8>)
} else {
quote!(u8)
}
} else {
let endian = endian_wrapper_ty(self.is_little_endian());
if self.is_nullable() {
quote!(#endian<Nullable<#offset_type>>)
} else {
quote!(#endian<#offset_type>)
}
};

let target_lifetime = (!target_is_generic).then(|| quote!(<'a>));

Expand All @@ -965,7 +1023,7 @@ impl Field {
quote!(())
};
let mut return_type =
quote!( #array_type<'a, #target_ident #target_lifetime, #offset_type> );
quote!( #array_type<'a, #target_ident #target_lifetime, #wrapper_type> );
let mut body = quote!(#array_type::new(offsets, data, #args_token));
if self.is_conditional() {
return_type = quote!( Option< #return_type > );
Expand Down Expand Up @@ -1100,9 +1158,11 @@ impl Field {
assert!(!self.is_array());
let typ = self.typ.cooked_type_tokens();
let condition = condition.condition_tokens_for_read();
let read_method = self.cursor_read_method();

if self.read_at_parse_time {
quote! {
let #name = #condition.then(|| cursor.read::<#typ>()).transpose()?.unwrap_or_default();
let #name = #condition.then(|| cursor.#read_method::<#typ>()).transpose()?.unwrap_or_default();
}
} else {
quote! {
Expand All @@ -1111,7 +1171,8 @@ impl Field {
}
} else if self.read_at_parse_time {
let typ = self.typ.cooked_type_tokens();
quote! ( let #name: #typ = cursor.read()?; )
let read_method = self.cursor_read_method();
quote! ( let #name: #typ = cursor.#read_method()?; )
} else {
panic!("who wrote this garbage anyway?");
};
Expand Down Expand Up @@ -1221,7 +1282,8 @@ impl Field {
// directly
quote!(cursor.read()?)
} else {
quote!(cursor.read_be()?)
let method = self.cursor_read_endian_method();
quote!(cursor.#method()?)
}
}
_ => match self
Expand All @@ -1231,7 +1293,10 @@ impl Field {
.map(FieldReadArgs::to_tokens_for_validation)
{
Some(args) => quote!(cursor.read_with_args(&#args)?),
None => quote!(cursor.read_be()?),
None => {
let method = self.cursor_read_endian_method();
quote!(cursor.#method()?)
}
},
};
quote!( #name : #rhs )
Expand Down
13 changes: 13 additions & 0 deletions font-codegen/src/parsing.rs
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,7 @@ pub(crate) struct TableFormat {
pub(crate) format: syn::Ident,
pub(crate) format_offset: Option<syn::LitInt>,
pub(crate) variants: Vec<FormatVariant>,
pub(crate) little_endian: bool,
}

#[derive(Debug, Clone)]
Expand Down Expand Up @@ -183,6 +184,7 @@ pub(crate) struct FieldAttrs {
pub(crate) nullable: Option<syn::Path>,
pub(crate) conditional: Option<Attr<Condition>>,
pub(crate) skip_getter: Option<syn::Path>,
pub(crate) little_endian: Option<syn::Path>,
/// specify that an offset getter has a custom impl
pub(crate) offset_getter: Option<Attr<syn::Ident>>,
/// optionally a method on the parent type used to generate the offset data
Expand Down Expand Up @@ -492,6 +494,7 @@ mod kw {
syn::custom_keyword!(record);
syn::custom_keyword!(flags);
syn::custom_keyword!(format);
syn::custom_keyword!(little_endian);
syn::custom_keyword!(group);
syn::custom_keyword!(skip);
syn::custom_keyword!(scalar);
Expand Down Expand Up @@ -672,6 +675,12 @@ impl Parse for TableFormat {
fn parse(input: ParseStream) -> syn::Result<Self> {
let attrs: TableAttrs = input.parse()?;
let _kw = input.parse::<kw::format>()?;
let little_endian = if input.peek(kw::little_endian) {
input.parse::<kw::little_endian>()?;
true
} else {
false
};
let format: syn::Ident = input.parse()?;
let format_offset = if input.peek(Token![@]) {
input.parse::<Token![@]>()?;
Expand Down Expand Up @@ -705,6 +714,7 @@ impl Parse for TableFormat {
name,
variants,
format_offset,
little_endian,
})
}
}
Expand Down Expand Up @@ -1040,6 +1050,7 @@ impl FieldAttrs {
static DOC: &str = "doc";
static NULLABLE: &str = "nullable";
static SKIP_GETTER: &str = "skip_getter";
static LITTLE_ENDIAN: &str = "little_endian";
static COUNT: &str = "count";
static SINCE_VERSION: &str = "since_version";
static IF_COND: &str = "if_cond";
Expand Down Expand Up @@ -1078,6 +1089,8 @@ impl Parse for FieldAttrs {
this.nullable = Some(attr.path().clone());
} else if ident == SKIP_GETTER {
this.skip_getter = Some(attr.path().clone());
} else if ident == LITTLE_ENDIAN {
this.little_endian = Some(attr.path().clone());
} else if ident == OFFSET_GETTER {
this.offset_getter = Some(Attr::new(ident.clone(), attr.parse_args()?));
} else if ident == OFFSET_DATA {
Expand Down
7 changes: 6 additions & 1 deletion font-codegen/src/table.rs
Original file line number Diff line number Diff line change
Expand Up @@ -824,6 +824,11 @@ pub(crate) fn generate_format_group(item: &TableFormat, items: &Items) -> syn::R
quote!(Self::#name(table) => table)
});

let format_read_method = if item.little_endian {
quote! { read_scalar_le_at }
} else {
quote! { read_at }
};
let format_offset = item
.format_offset
.as_ref()
Expand Down Expand Up @@ -859,7 +864,7 @@ pub(crate) fn generate_format_group(item: &TableFormat, items: &Items) -> syn::R

impl<'a> FontRead<'a> for #name<'a> {
fn read(data: FontData<'a>) -> Result<Self, ReadError> {
let format: #format = data.read_at(#format_offset)?;
let format: #format = data.#format_read_method(#format_offset)?;
#maybe_allow_lint
match format {
#( #match_arms ),*
Expand Down
4 changes: 3 additions & 1 deletion font-types/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ mod name_id;
mod offset;
mod point;
mod raw;
mod raw_le;
mod tag;
mod uint24;
mod version;
Expand All @@ -41,7 +42,8 @@ pub use longdatetime::LongDateTime;
pub use name_id::NameId;
pub use offset::{Nullable, Offset16, Offset24, Offset32};
pub use point::Point;
pub use raw::{BigEndian, FixedSize, Scalar};
pub use raw::{BigEndian, BytesWrapper, FixedSize, Scalar};
pub use raw_le::{LittleEndian, ScalarLE};
pub use tag::{InvalidTag, Tag};
pub use uint24::Uint24;
pub use version::{Compatible, MajorMinor, Version16Dot16};
Expand Down
38 changes: 28 additions & 10 deletions font-types/src/raw.rs
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ pub trait FixedSize: Sized {

/// we hide this trait; it isn't part of the public API, and this clarifies
/// the guarantee that it is only implemented for [u8; N]
mod sealed {
pub(super) mod sealed {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

if we keep this I think it's fine to rename the trait to just ByteArray?

/// A trait representing any fixed-size big-endian byte array.
///
/// This is only used in `Scalar`, as a way of expressing the condition that the
Expand All @@ -68,6 +68,21 @@ mod sealed {
}
}

/// A trait for types that contain a byte slice which can be decoded to a scalar value.
pub trait BytesWrapper: Sized {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If we keep this I would prefer to call this something like EndianWrapper or something to more clearly communicate that this is a common interface for the LittleEndian and BigEndian types. (I'd also clarify this in the docs)

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The other thing that this type encodes is that the underlying byte slice doesn't have the same alignment requirements as the scalar value it decodes to. Not sure how important that is to mention, though.

type Inner: Sized;
/// Attempt to construct a new raw value from this slice.
///
/// This will fail if `slice.len() != T::RAW_BYTE_LEN`.
fn from_slice(slice: &[u8]) -> Option<Self>;

/// Get the scalar value of the bytes contained in this byte wrapper.
fn get(&self) -> Self::Inner;

/// Set the value, overwriting the bytes.
fn set(&mut self, value: Self::Inner);
}

/// A wrapper around raw big-endian bytes for some type.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
Expand All @@ -83,29 +98,32 @@ unsafe impl<T> bytemuck::Zeroable for BigEndian<T> where T: Scalar + Copy {}
#[cfg(feature = "bytemuck")]
unsafe impl<T> bytemuck::AnyBitPattern for BigEndian<T> where T: Scalar + Copy + 'static {}

impl<T: Scalar> BigEndian<T> {
/// construct a new `BigEndian<T>` from raw bytes
pub fn new(raw: T::Raw) -> BigEndian<T> {
BigEndian(raw)
}

impl<T: Scalar> BytesWrapper for BigEndian<T> {
type Inner = T;
/// Attempt to construct a new raw value from this slice.
///
/// This will fail if `slice.len() != T::RAW_BYTE_LEN`.
pub fn from_slice(slice: &[u8]) -> Option<Self> {
fn from_slice(slice: &[u8]) -> Option<Self> {
sealed::BeByteArray::from_slice(slice).map(Self)
}

/// Convert this raw type to its native representation.
#[inline(always)]
pub fn get(&self) -> T {
fn get(&self) -> T {
T::from_raw(self.0)
}

/// Set the value, overwriting the bytes.
pub fn set(&mut self, value: T) {
fn set(&mut self, value: T) {
self.0 = value.to_raw();
}
}

impl<T: Scalar> BigEndian<T> {
/// construct a new `BigEndian<T>` from raw bytes
pub fn new(raw: T::Raw) -> BigEndian<T> {
BigEndian(raw)
}

/// Get the raw big-endian bytes.
pub fn be_bytes(&self) -> &[u8] {
Expand Down
Loading
Loading