diff --git a/pdl-compiler/src/analyzer.rs b/pdl-compiler/src/analyzer.rs index b80a6b6..ed87b39 100644 --- a/pdl-compiler/src/analyzer.rs +++ b/pdl-compiler/src/analyzer.rs @@ -19,128 +19,70 @@ use codespan_reporting::term::termcolor; use std::collections::HashMap; use crate::ast::*; -use crate::parser::ast as parser_ast; - -pub mod ast { - use serde::Serialize; - - /// Field and declaration size information. - #[derive(Debug, Clone, Copy, PartialEq, Eq)] - #[allow(unused)] - pub enum Size { - /// Constant size in bits. - Static(usize), - /// Size indicated at packet parsing by a size or count field. - /// The parameter is the static part of the size. - Dynamic, - /// The size cannot be determined statically or at runtime. - /// The packet assumes the largest possible size. - Unknown, - } - - // TODO: use derive(Default) when UWB is using Rust 1.62.0. - #[allow(clippy::derivable_impls)] - impl Default for Size { - fn default() -> Size { - Size::Unknown - } - } - - #[derive(Debug, Serialize, Default, Clone, PartialEq)] - pub struct Annotation; - - #[derive(Default, Debug, Clone, PartialEq, Eq)] - pub struct FieldAnnotation { - // Size of field. - pub size: Size, - // Size of field with padding bytes. - // This information exists only for array fields. - pub padded_size: Option, - } - - #[derive(Default, Debug, Clone, PartialEq, Eq)] - pub struct DeclAnnotation { - // Size computed excluding the payload. - pub size: Size, - // Payload size, or Static(0) if the declaration does not - // have a payload. - pub payload_size: Size, - } - - impl std::ops::Add for Size { - type Output = Size; - fn add(self, rhs: Size) -> Self::Output { - match (self, rhs) { - (Size::Unknown, _) | (_, Size::Unknown) => Size::Unknown, - (Size::Dynamic, _) | (_, Size::Dynamic) => Size::Dynamic, - (Size::Static(lhs), Size::Static(rhs)) => Size::Static(lhs + rhs), - } - } - } - impl std::ops::Mul for Size { - type Output = Size; - fn mul(self, rhs: Size) -> Self::Output { - match (self, rhs) { - (Size::Unknown, _) | (_, Size::Unknown) => Size::Unknown, - (Size::Dynamic, _) | (_, Size::Dynamic) => Size::Dynamic, - (Size::Static(lhs), Size::Static(rhs)) => Size::Static(lhs * rhs), - } - } - } +/// Field and declaration size information. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[allow(unused)] +pub enum Size { + /// Constant size in bits. + Static(usize), + /// Size indicated at packet parsing by a size or count field. + /// The parameter is the static part of the size. + Dynamic, + /// The size cannot be determined statically or at runtime. + /// The packet assumes the largest possible size. + Unknown, +} - impl std::ops::Mul for Size { - type Output = Size; - fn mul(self, rhs: usize) -> Self::Output { - match self { - Size::Unknown => Size::Unknown, - Size::Dynamic => Size::Dynamic, - Size::Static(lhs) => Size::Static(lhs * rhs), - } - } +// TODO: use derive(Default) when UWB is using Rust 1.62.0. +#[allow(clippy::derivable_impls)] +impl Default for Size { + fn default() -> Size { + Size::Unknown } +} - impl Size { - // Returns the width if the size is static. 
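// Illustration (reviewer note, not part of the patch): the `Add`/`Mul`
// impls above give `Size` a "least precise operand wins" algebra, so a
// computed size is `Static` only when every contributing size is `Static`:
//
//     assert_eq!(Size::Static(8) + Size::Static(4), Size::Static(12));
//     assert_eq!(Size::Static(8) + Size::Dynamic, Size::Dynamic);
//     assert_eq!(Size::Dynamic + Size::Unknown, Size::Unknown);
//     assert_eq!(Size::Static(8) * 3, Size::Static(24)); // via Mul<usize>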
-        pub fn static_(&self) -> Option<usize> {
-            match self {
-                Size::Static(size) => Some(*size),
-                Size::Dynamic | Size::Unknown => None,
-            }
+impl std::ops::Add for Size {
+    type Output = Size;
+    fn add(self, rhs: Size) -> Self::Output {
+        match (self, rhs) {
+            (Size::Unknown, _) | (_, Size::Unknown) => Size::Unknown,
+            (Size::Dynamic, _) | (_, Size::Dynamic) => Size::Dynamic,
+            (Size::Static(lhs), Size::Static(rhs)) => Size::Static(lhs + rhs),
         }
     }
+}
 
-    impl DeclAnnotation {
-        pub fn total_size(&self) -> Size {
-            self.size + self.payload_size
+impl std::ops::Mul for Size {
+    type Output = Size;
+    fn mul(self, rhs: Size) -> Self::Output {
+        match (self, rhs) {
+            (Size::Unknown, _) | (_, Size::Unknown) => Size::Unknown,
+            (Size::Dynamic, _) | (_, Size::Dynamic) => Size::Dynamic,
+            (Size::Static(lhs), Size::Static(rhs)) => Size::Static(lhs * rhs),
         }
     }
+}
 
-    impl FieldAnnotation {
-        pub fn new(size: Size) -> Self {
-            FieldAnnotation { size, padded_size: None }
-        }
-
-        // Returns the field width or padded width if static.
-        pub fn static_(&self) -> Option<usize> {
-            match self.padded_size {
-                Some(padding) => Some(8 * padding),
-                None => self.size.static_(),
-            }
+impl std::ops::Mul<usize> for Size {
+    type Output = Size;
+    fn mul(self, rhs: usize) -> Self::Output {
+        match self {
+            Size::Unknown => Size::Unknown,
+            Size::Dynamic => Size::Dynamic,
+            Size::Static(lhs) => Size::Static(lhs * rhs),
         }
     }
+}
 
-    impl crate::ast::Annotation for Annotation {
-        type FieldAnnotation = FieldAnnotation;
-        type DeclAnnotation = DeclAnnotation;
+impl Size {
+    // Returns the width if the size is static.
+    pub fn static_(&self) -> Option<usize> {
+        match self {
+            Size::Static(size) => Some(*size),
+            Size::Dynamic | Size::Unknown => None,
+        }
     }
-
-    #[allow(unused)]
-    pub type Field = crate::ast::Field<Annotation>;
-    #[allow(unused)]
-    pub type Decl = crate::ast::Decl<Annotation>;
-    #[allow(unused)]
-    pub type File = crate::ast::File<Annotation>;
 }
 
 /// List of unique errors reported as analyzer diagnostics.
@@ -212,12 +154,20 @@ pub struct Diagnostics {
 
 /// Gather information about the full AST.
 #[derive(Debug)]
-pub struct Scope<'d, A: Annotation = ast::Annotation> {
+pub struct Scope<'d> {
     /// Reference to the source file.
-    pub file: &'d crate::ast::File<A>,
+    pub file: &'d File,
     /// Collection of Group, Packet, Enum, Struct, Checksum, and CustomField
     /// declarations.
-    pub typedef: HashMap<String, &'d crate::ast::Decl<A>>,
+    pub typedef: HashMap<String, &'d Decl>,
+}
+
+/// Gather size information about the full AST.
+#[derive(Debug)]
+pub struct Schema {
+    size: HashMap<usize, Size>,
+    padded_size: HashMap<usize, Option<usize>>,
+    payload_size: HashMap<usize, Size>,
 }
 
 impl Diagnostics {
@@ -250,10 +200,10 @@ impl Diagnostics {
     }
 }
 
-impl<'d, A: Annotation + Default> Scope<'d, A> {
-    pub fn new(file: &'d crate::ast::File<A>) -> Result<Scope<'d, A>, Diagnostics> {
+impl<'d> Scope<'d> {
+    pub fn new(file: &'d File) -> Result<Scope<'d>, Diagnostics> {
         // Gather top-level declarations.
-        let mut scope: Scope<A> = Scope { file, typedef: Default::default() };
+        let mut scope: Scope = Scope { file, typedef: Default::default() };
         let mut diagnostics: Diagnostics = Default::default();
         for decl in &file.declarations {
             if let Some(id) = decl.id() {
@@ -286,24 +236,18 @@ impl<'d, A: Annotation + Default> Scope<'d, A> {
     }
 
     /// Iterate over the child declarations of the selected declaration.
-    pub fn iter_children<'s>(
-        &'s self,
-        decl: &'d crate::ast::Decl<A>,
-    ) -> impl Iterator<Item = &'d crate::ast::Decl<A>> + 's {
+    pub fn iter_children<'s>(&'s self, decl: &'d Decl) -> impl Iterator<Item = &'d Decl> + 's {
         self.file.iter_children(decl)
     }
 
     /// Return the parent declaration of the selected declaration,
     /// if it has one.
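// Illustration (not part of the patch): `Schema` replaces the per-node
// annotations of the old `analyzer::ast` module. Sizes are computed once in
// `Schema::new` and then looked up through the `key` carried by every
// `Decl` and `Field`, e.g.:
//
//     let schema = Schema::new(&file);
//     let decl_size = schema.total_size(decl.key); // fields + payload
//     let field_size = schema.size(field.key);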
- pub fn get_parent(&self, decl: &crate::ast::Decl) -> Option<&'d crate::ast::Decl> { + pub fn get_parent(&self, decl: &Decl) -> Option<&'d Decl> { decl.parent_id().and_then(|parent_id| self.typedef.get(parent_id).cloned()) } /// Iterate over the parent declarations of the selected declaration. - pub fn iter_parents<'s>( - &'s self, - decl: &'d crate::ast::Decl, - ) -> impl Iterator> + 's { + pub fn iter_parents<'s>(&'s self, decl: &'d Decl) -> impl Iterator + 's { std::iter::successors(self.get_parent(decl), |decl| self.get_parent(decl)) } @@ -311,24 +255,21 @@ impl<'d, A: Annotation + Default> Scope<'d, A> { /// including the current declaration. pub fn iter_parents_and_self<'s>( &'s self, - decl: &'d crate::ast::Decl, - ) -> impl Iterator> + 's { + decl: &'d Decl, + ) -> impl Iterator + 's { std::iter::successors(Some(decl), |decl| self.get_parent(decl)) } /// Iterate over the declaration and its parent's fields. - pub fn iter_fields<'s>( - &'s self, - decl: &'d crate::ast::Decl, - ) -> impl Iterator> + 's { + pub fn iter_fields<'s>(&'s self, decl: &'d Decl) -> impl Iterator + 's { std::iter::successors(Some(decl), |decl| self.get_parent(decl)).flat_map(Decl::fields) } /// Iterate over the declaration parent's fields. pub fn iter_parent_fields<'s>( &'s self, - decl: &'d crate::ast::Decl, - ) -> impl Iterator> + 's { + decl: &'d Decl, + ) -> impl Iterator + 's { std::iter::successors(self.get_parent(decl), |decl| self.get_parent(decl)) .flat_map(Decl::fields) } @@ -336,16 +277,13 @@ impl<'d, A: Annotation + Default> Scope<'d, A> { /// Iterate over the declaration and its parent's constraints. pub fn iter_constraints<'s>( &'s self, - decl: &'d crate::ast::Decl, + decl: &'d Decl, ) -> impl Iterator + 's { std::iter::successors(Some(decl), |decl| self.get_parent(decl)).flat_map(Decl::constraints) } /// Return the type declaration for the selected field, if applicable. - pub fn get_type_declaration( - &self, - field: &crate::ast::Field, - ) -> Option<&'d crate::ast::Decl> { + pub fn get_type_declaration(&self, field: &Field) -> Option<&'d Decl> { match &field.desc { FieldDesc::Checksum { .. } | FieldDesc::Padding { .. } @@ -367,7 +305,7 @@ impl<'d, A: Annotation + Default> Scope<'d, A> { } /// Test if the selected field is a bit-field. - pub fn is_bitfield(&self, field: &crate::ast::Field) -> bool { + pub fn is_bitfield(&self, field: &Field) -> bool { match &field.desc { FieldDesc::Size { .. } | FieldDesc::Count { .. } @@ -386,6 +324,162 @@ impl<'d, A: Annotation + Default> Scope<'d, A> { } } +impl Schema { + /// Check correct definition of packet sizes. + /// Annotate fields and declarations with the size in bits. + pub fn new(file: &File) -> Schema { + fn annotate_decl(schema: &mut Schema, scope: &HashMap, decl: &Decl) { + let mut padding = None; + let mut size = Size::Static(0); + let mut payload_size = Size::Static(0); + + for field in decl.fields() { + // Compute the size of each declared fields. + let field_size = annotate_field(schema, scope, decl, field); + + // Sum the size of the non payload fields to get the + // declaration size. Lookup the payload field size. + match &field.desc { + FieldDesc::Payload { .. } | FieldDesc::Body { .. } => payload_size = field_size, + _ => { + size = size + + match padding { + Some(padding) => Size::Static(8 * padding), + None => field_size, + } + } + } + + // Add the padding information to the fields. 
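// Padding semantics, as in the annotation pass this replaces: a
// `_padding_ [N]` field pads the array field immediately preceding it to N
// bytes, so that array is recorded with `padded_size == Some(N)` and the
// declaration size counts `8 * N` bits for it instead of its natural size.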
+ schema.padded_size.insert(field.key, padding); + padding = match &field.desc { + FieldDesc::Padding { size } => Some(*size), + _ => None, + }; + } + + // Save the declaration size. + let (size, payload_size) = match &decl.desc { + DeclDesc::Packet { .. } | DeclDesc::Struct { .. } | DeclDesc::Group { .. } => { + (size, payload_size) + } + DeclDesc::Enum { width, .. } + | DeclDesc::Checksum { width, .. } + | DeclDesc::CustomField { width: Some(width), .. } => { + (Size::Static(*width), Size::Static(0)) + } + DeclDesc::CustomField { width: None, .. } => (Size::Dynamic, Size::Static(0)), + DeclDesc::Test { .. } => (Size::Static(0), Size::Static(0)), + }; + + schema.size.insert(decl.key, size); + schema.payload_size.insert(decl.key, payload_size); + } + + fn annotate_field( + schema: &mut Schema, + scope: &HashMap, + decl: &Decl, + field: &Field, + ) -> Size { + let size = match &field.desc { + _ if field.cond.is_some() => Size::Dynamic, + FieldDesc::Checksum { .. } | FieldDesc::Padding { .. } => Size::Static(0), + FieldDesc::Size { width, .. } + | FieldDesc::Count { width, .. } + | FieldDesc::ElementSize { width, .. } + | FieldDesc::FixedScalar { width, .. } + | FieldDesc::Reserved { width } + | FieldDesc::Scalar { width, .. } => Size::Static(*width), + FieldDesc::Flag { .. } => Size::Static(1), + FieldDesc::Body | FieldDesc::Payload { .. } => { + let has_payload_size = decl.fields().any(|field| match &field.desc { + FieldDesc::Size { field_id, .. } => { + field_id == "_body_" || field_id == "_payload_" + } + _ => false, + }); + if has_payload_size { + Size::Dynamic + } else { + Size::Unknown + } + } + FieldDesc::Typedef { type_id, .. } + | FieldDesc::FixedEnum { enum_id: type_id, .. } + | FieldDesc::Group { group_id: type_id, .. } => { + let type_key = scope.get(type_id).unwrap(); + schema.total_size(*type_key) + } + FieldDesc::Array { width: Some(width), size: Some(size), .. } => { + Size::Static(*size * *width) + } + FieldDesc::Array { + width: None, size: Some(size), type_id: Some(type_id), .. + } => { + let type_key = scope.get(type_id).unwrap(); + schema.total_size(*type_key) * *size + } + FieldDesc::Array { id, size: None, .. } => { + // The element does not matter when the size of the array is + // not static. The array size depends on there being a count + // or size field or not. + let has_array_size = decl.fields().any(|field| match &field.desc { + FieldDesc::Size { field_id, .. } | FieldDesc::Count { field_id, .. } => { + field_id == id + } + _ => false, + }); + if has_array_size { + Size::Dynamic + } else { + Size::Unknown + } + } + FieldDesc::Array { .. } => unreachable!(), + }; + + schema.size.insert(field.key, size); + size + } + + let mut scope = HashMap::new(); + for decl in &file.declarations { + if let Some(id) = decl.id() { + scope.insert(id.to_owned(), decl.key); + } + } + + let mut schema = Schema { + size: Default::default(), + padded_size: Default::default(), + payload_size: Default::default(), + }; + + for decl in &file.declarations { + annotate_decl(&mut schema, &scope, decl); + } + + schema + } + + pub fn size(&self, key: usize) -> Size { + *self.size.get(&key).unwrap() + } + + pub fn padded_size(&self, key: usize) -> Option { + *self.padded_size.get(&key).unwrap() + } + + pub fn payload_size(&self, key: usize) -> Size { + *self.payload_size.get(&key).unwrap() + } + + pub fn total_size(&self, key: usize) -> Size { + self.size(key) + self.payload_size(key) + } +} + /// Return the bit-width of a scalar value. 
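// For example: bit_width(0) == 0, bit_width(5) == 3 (0b101),
// bit_width(255) == 8, bit_width(256) == 9. Its counterpart `scalar_max`
// below returns the largest value representable in a given bit width.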
fn bit_width(value: usize) -> usize { usize::BITS as usize - value.leading_zeros() as usize @@ -411,10 +505,7 @@ fn scalar_max(width: usize) -> usize { /// - undeclared test identifier /// - invalid test identifier /// - recursive declaration -fn check_decl_identifiers( - file: &parser_ast::File, - scope: &Scope, -) -> Result<(), Diagnostics> { +fn check_decl_identifiers(file: &File, scope: &Scope) -> Result<(), Diagnostics> { enum Mark { Temporary, Permanent, @@ -425,9 +516,9 @@ fn check_decl_identifiers( } fn bfs<'d>( - decl: &'d parser_ast::Decl, + decl: &'d Decl, context: &mut Context<'d>, - scope: &Scope<'d, parser_ast::Annotation>, + scope: &Scope<'d>, diagnostics: &mut Diagnostics, ) { let decl_id = decl.id().unwrap(); @@ -586,7 +677,7 @@ fn check_decl_identifiers( /// Check field identifiers. /// Raises error diagnostics for the following cases: /// - duplicate field identifier -fn check_field_identifiers(file: &parser_ast::File) -> Result<(), Diagnostics> { +fn check_field_identifiers(file: &File) -> Result<(), Diagnostics> { let mut diagnostics: Diagnostics = Default::default(); for decl in &file.declarations { let mut local_scope = HashMap::new(); @@ -620,7 +711,7 @@ fn check_field_identifiers(file: &parser_ast::File) -> Result<(), Diagnostics> { /// Raises error diagnostics for the following cases: /// - duplicate tag identifier /// - duplicate tag value -fn check_enum_declarations(file: &parser_ast::File) -> Result<(), Diagnostics> { +fn check_enum_declarations(file: &File) -> Result<(), Diagnostics> { // Return the inclusive range with bounds correctly ordered. // The analyzer will raise an error if the bounds are incorrectly ordered, but this // will enable additional checks. @@ -855,8 +946,8 @@ fn check_enum_declarations(file: &parser_ast::File) -> Result<(), Diagnostics> { /// Helper function for validating one constraint. fn check_constraint( constraint: &Constraint, - decl: &parser_ast::Decl, - scope: &Scope, + decl: &Decl, + scope: &Scope, diagnostics: &mut Diagnostics, ) { match scope.iter_fields(decl).find(|field| field.id() == Some(&constraint.id)) { @@ -985,8 +1076,8 @@ fn check_constraint( /// Helper function for validating a list of constraints. fn check_constraints_list<'d>( constraints: &'d [Constraint], - parent_decl: &parser_ast::Decl, - scope: &Scope, + parent_decl: &Decl, + scope: &Scope, mut constraints_by_id: HashMap, diagnostics: &mut Diagnostics, ) { @@ -1018,10 +1109,7 @@ fn check_constraints_list<'d>( /// - invalid constraint enum value (bad type) /// - invalid constraint enum value (undeclared tag) /// - duplicate constraint -fn check_decl_constraints( - file: &parser_ast::File, - scope: &Scope, -) -> Result<(), Diagnostics> { +fn check_decl_constraints(file: &File, scope: &Scope) -> Result<(), Diagnostics> { let mut diagnostics: Diagnostics = Default::default(); for decl in &file.declarations { // Check constraints for packet inheritance. @@ -1060,10 +1148,7 @@ fn check_decl_constraints( /// - invalid constraint enum value (bad type) /// - invalid constraint enum value (undeclared tag) /// - duplicate constraint -fn check_group_constraints( - file: &parser_ast::File, - scope: &Scope, -) -> Result<(), Diagnostics> { +fn check_group_constraints(file: &File, scope: &Scope) -> Result<(), Diagnostics> { let mut diagnostics: Diagnostics = Default::default(); for decl in &file.declarations { // Check constraints for group inlining. 
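// A group constraint must name a field declared in the group and give it a
// value of the field's type, e.g. (illustrative PDL):
//
//     group G { opcode : 8 }
//     packet P { G { opcode = 1 } }
//
// `check_constraints_list` rejects undeclared identifiers, values of the
// wrong type, overflowing scalar values, and duplicated constraints.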
@@ -1095,7 +1180,7 @@ fn check_group_constraints( /// - undeclared elementsize identifier /// - invalid elementsize identifier /// - duplicate elementsize field -fn check_size_fields(file: &parser_ast::File) -> Result<(), Diagnostics> { +fn check_size_fields(file: &File) -> Result<(), Diagnostics> { let mut diagnostics: Diagnostics = Default::default(); for decl in &file.declarations { let mut size_for_id = HashMap::new(); @@ -1223,10 +1308,7 @@ fn check_size_fields(file: &parser_ast::File) -> Result<(), Diagnostics> { /// - undeclared enum identifier /// - invalid enum identifier /// - undeclared tag identifier -fn check_fixed_fields( - file: &parser_ast::File, - scope: &Scope, -) -> Result<(), Diagnostics> { +fn check_fixed_fields(file: &File, scope: &Scope) -> Result<(), Diagnostics> { let mut diagnostics: Diagnostics = Default::default(); for decl in &file.declarations { for field in decl.fields() { @@ -1291,16 +1373,16 @@ fn check_fixed_fields( /// - duplicate body field /// - duplicate body field size /// - missing payload field -fn check_payload_fields(file: &parser_ast::File) -> Result<(), Diagnostics> { +fn check_payload_fields(file: &File) -> Result<(), Diagnostics> { // Check whether the declaration requires a payload field. // The payload is required if any child packets declares fields. - fn requires_payload(file: &parser_ast::File, decl: &parser_ast::Decl) -> bool { + fn requires_payload(file: &File, decl: &Decl) -> bool { file.iter_children(decl).any(|child| child.fields().next().is_some()) } let mut diagnostics: Diagnostics = Default::default(); for decl in &file.declarations { - let mut payload: Option<&parser_ast::Field> = None; + let mut payload: Option<&Field> = None; for field in decl.fields() { match &field.desc { FieldDesc::Payload { .. } | FieldDesc::Body { .. } => { @@ -1345,7 +1427,7 @@ fn check_payload_fields(file: &parser_ast::File) -> Result<(), Diagnostics> { /// Check array fields. /// Raises error diagnostics for the following cases: /// - redundant array field size -fn check_array_fields(file: &parser_ast::File) -> Result<(), Diagnostics> { +fn check_array_fields(file: &File) -> Result<(), Diagnostics> { let mut diagnostics: Diagnostics = Default::default(); for decl in &file.declarations { for field in decl.fields() { @@ -1379,7 +1461,7 @@ fn check_array_fields(file: &parser_ast::File) -> Result<(), Diagnostics> { /// Check padding fields. 
/// Raises error diagnostics for the following cases: /// - padding field not following an array field -fn check_padding_fields(file: &parser_ast::File) -> Result<(), Diagnostics> { +fn check_padding_fields(file: &File) -> Result<(), Diagnostics> { let mut diagnostics: Diagnostics = Default::default(); for decl in &file.declarations { let mut previous_is_array = false; @@ -1405,10 +1487,7 @@ fn check_padding_fields(file: &parser_ast::File) -> Result<(), Diagnostics> { /// - checksum field precedes checksum start /// - undeclared checksum field /// - invalid checksum field -fn check_checksum_fields( - _file: &parser_ast::File, - _scope: &Scope, -) -> Result<(), Diagnostics> { +fn check_checksum_fields(_file: &File, _scope: &Scope) -> Result<(), Diagnostics> { // TODO Ok(()) } @@ -1419,11 +1498,11 @@ fn check_checksum_fields( /// - invalid constraint identifier /// - invalid constraint scalar value (bad type) /// - invalid constraint scalar value (overflow) -fn check_optional_fields(file: &parser_ast::File) -> Result<(), Diagnostics> { +fn check_optional_fields(file: &File) -> Result<(), Diagnostics> { let mut diagnostics: Diagnostics = Default::default(); for decl in &file.declarations { - let mut local_scope: HashMap = HashMap::new(); - let mut condition_ids: HashMap = HashMap::new(); + let mut local_scope: HashMap = HashMap::new(); + let mut condition_ids: HashMap = HashMap::new(); for field in decl.fields() { if let Some(ref cond) = field.cond { match &field.desc { @@ -1527,167 +1606,13 @@ fn check_optional_fields(file: &parser_ast::File) -> Result<(), Diagnostics> { diagnostics.err_or(()) } -/// Check correct definition of packet sizes. -/// Annotate fields and declarations with the size in bits. -fn compute_field_sizes(file: &parser_ast::File) -> ast::File { - fn annotate_decl( - decl: &parser_ast::Decl, - scope: &HashMap, - ) -> ast::Decl { - // Annotate the declaration fields. - // Add the padding information to the fields in the same pass. - let mut decl = decl.annotate(Default::default(), |fields| { - let mut fields: Vec<_> = - fields.iter().map(|field| annotate_field(decl, field, scope)).collect(); - let mut padding = None; - for field in fields.iter_mut().rev() { - field.annot.padded_size = padding; - padding = match &field.desc { - FieldDesc::Padding { size } => Some(*size), - _ => None, - }; - } - fields - }); - - // Compute the declaration annotation. - decl.annot = match &decl.desc { - DeclDesc::Packet { fields, .. } - | DeclDesc::Struct { fields, .. } - | DeclDesc::Group { fields, .. } => { - let mut size = decl - .parent_id() - .and_then(|parent_id| scope.get(parent_id)) - .map(|annot| annot.size) - .unwrap_or(ast::Size::Static(0)); - let mut payload_size = ast::Size::Static(0); - for field in fields { - match &field.desc { - FieldDesc::Payload { .. } | FieldDesc::Body { .. } => { - payload_size = field.annot.size - } - _ => { - size = size - + match field.annot.padded_size { - Some(padding) => ast::Size::Static(8 * padding), - None => field.annot.size, - } - } - } - } - ast::DeclAnnotation { size, payload_size } - } - DeclDesc::Enum { width, .. } - | DeclDesc::Checksum { width, .. } - | DeclDesc::CustomField { width: Some(width), .. } => ast::DeclAnnotation { - size: ast::Size::Static(*width), - payload_size: ast::Size::Static(0), - }, - DeclDesc::CustomField { width: None, .. } => { - ast::DeclAnnotation { size: ast::Size::Dynamic, payload_size: ast::Size::Static(0) } - } - DeclDesc::Test { .. 
} => ast::DeclAnnotation { - size: ast::Size::Static(0), - payload_size: ast::Size::Static(0), - }, - }; - decl - } - - fn annotate_field( - decl: &parser_ast::Decl, - field: &parser_ast::Field, - scope: &HashMap, - ) -> ast::Field { - field.annotate(match &field.desc { - _ if field.cond.is_some() => ast::FieldAnnotation::new(ast::Size::Dynamic), - FieldDesc::Checksum { .. } | FieldDesc::Padding { .. } => { - ast::FieldAnnotation::new(ast::Size::Static(0)) - } - FieldDesc::Size { width, .. } - | FieldDesc::Count { width, .. } - | FieldDesc::ElementSize { width, .. } - | FieldDesc::FixedScalar { width, .. } - | FieldDesc::Reserved { width } - | FieldDesc::Scalar { width, .. } => { - ast::FieldAnnotation::new(ast::Size::Static(*width)) - } - FieldDesc::Flag { .. } => ast::FieldAnnotation::new(ast::Size::Static(1)), - FieldDesc::Body | FieldDesc::Payload { .. } => { - let has_payload_size = decl.fields().any(|field| match &field.desc { - FieldDesc::Size { field_id, .. } => { - field_id == "_body_" || field_id == "_payload_" - } - _ => false, - }); - ast::FieldAnnotation::new(if has_payload_size { - ast::Size::Dynamic - } else { - ast::Size::Unknown - }) - } - FieldDesc::Typedef { type_id, .. } - | FieldDesc::FixedEnum { enum_id: type_id, .. } - | FieldDesc::Group { group_id: type_id, .. } => { - let type_annot = scope.get(type_id).unwrap(); - ast::FieldAnnotation::new(type_annot.size + type_annot.payload_size) - } - FieldDesc::Array { width: Some(width), size: Some(size), .. } => { - ast::FieldAnnotation::new(ast::Size::Static(*size * *width)) - } - FieldDesc::Array { width: None, size: Some(size), type_id: Some(type_id), .. } => { - let type_annot = scope.get(type_id).unwrap(); - ast::FieldAnnotation::new((type_annot.size + type_annot.payload_size) * *size) - } - FieldDesc::Array { id, size: None, .. } => { - // The element does not matter when the size of the array is - // not static. The array size depends on there being a count - // or size field or not. - let has_array_size = decl.fields().any(|field| match &field.desc { - FieldDesc::Size { field_id, .. } | FieldDesc::Count { field_id, .. } => { - field_id == id - } - _ => false, - }); - ast::FieldAnnotation::new(if has_array_size { - ast::Size::Dynamic - } else { - ast::Size::Unknown - }) - } - FieldDesc::Array { .. } => unreachable!(), - }) - } - - // Construct a scope mapping typedef identifiers to decl annotations. - let mut scope = HashMap::new(); - - // Annotate declarations. - let mut declarations = Vec::new(); - for decl in file.declarations.iter() { - let decl = annotate_decl(decl, &scope); - if let Some(id) = decl.id() { - scope.insert(id.to_string(), decl.annot.clone()); - } - declarations.push(decl); - } - - File { - version: file.version.clone(), - file: file.file, - comments: file.comments.clone(), - endianness: file.endianness, - declarations, - } -} - /// Inline group fields and remove group declarations. 
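// For example (illustrative PDL), inlining rewrites
//
//     group Header { opcode : 8, _reserved_ : 8 }
//     packet Foo { Header { opcode = 1 }, _payload_ }
//
// into a `Foo` whose fields are the fixed scalar `opcode = 1`, the reserved
// field, and the payload; the `Header` declaration itself is then dropped.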
-fn inline_groups(file: &parser_ast::File) -> Result { +fn inline_groups(file: &File) -> Result { fn inline_fields<'a>( - fields: impl Iterator, - groups: &HashMap, + fields: impl Iterator, + groups: &HashMap, constraints: &HashMap, - ) -> Vec { + ) -> Vec { fields .flat_map(|field| match &field.desc { FieldDesc::Group { group_id, constraints: group_constraints } => { @@ -1700,18 +1625,18 @@ fn inline_groups(file: &parser_ast::File) -> Result { - vec![parser_ast::Field { + vec![Field { desc: FieldDesc::FixedScalar { width: *width, value: constraints.get(id).unwrap().value.unwrap(), }, loc: field.loc, - annot: field.annot, + key: field.key, cond: field.cond.clone(), }] } FieldDesc::Typedef { id, type_id, .. } if constraints.contains_key(id) => { - vec![parser_ast::Field { + vec![Field { desc: FieldDesc::FixedEnum { enum_id: type_id.clone(), tag_id: constraints @@ -1720,7 +1645,7 @@ fn inline_groups(file: &parser_ast::File) -> Result Result Some(parser_ast::Decl { + DeclDesc::Packet { fields, id, parent_id, constraints } => Some(Decl { desc: DeclDesc::Packet { fields: inline_fields(fields.iter(), &groups, &HashMap::new()), id: id.clone(), @@ -1748,9 +1673,9 @@ fn inline_groups(file: &parser_ast::File) -> Result Some(parser_ast::Decl { + DeclDesc::Struct { fields, id, parent_id, constraints } => Some(Decl { desc: DeclDesc::Struct { fields: inline_fields(fields.iter(), &groups, &HashMap::new()), id: id.clone(), @@ -1758,7 +1683,7 @@ fn inline_groups(file: &parser_ast::File) -> Result None, _ => Some(decl.clone()), @@ -1767,17 +1692,18 @@ fn inline_groups(file: &parser_ast::File) -> Result Result { +pub fn analyze(file: &File) -> Result { let scope = Scope::new(file)?; check_decl_identifiers(file, &scope)?; check_field_identifiers(file)?; @@ -1830,13 +1756,13 @@ pub fn analyze(file: &parser_ast::File) -> Result { desugar_flags(&mut file); let scope = Scope::new(&file)?; check_decl_constraints(&file, &scope)?; - Ok(compute_field_sizes(&file)) + Ok(file) } #[cfg(test)] mod test { use crate::analyzer; - use crate::ast::*; + use crate::ast; use crate::parser::parse_inline; use codespan_reporting::term::termcolor; @@ -1844,7 +1770,7 @@ mod test { macro_rules! raises { ($code:ident, $text:literal) => {{ - let mut db = SourceDatabase::new(); + let mut db = ast::SourceDatabase::new(); let file = parse_inline(&mut db, "stdin", $text.to_owned()).expect("parsing failure"); let result = analyzer::analyze(&file); assert!(matches!(result, Err(_))); @@ -1859,7 +1785,7 @@ mod test { macro_rules! 
valid { ($text:literal) => {{ - let mut db = SourceDatabase::new(); + let mut db = ast::SourceDatabase::new(); let file = parse_inline(&mut db, "stdin", $text.to_owned()).expect("parsing failure"); assert!(analyzer::analyze(&file).is_ok()); }}; @@ -3112,7 +3038,7 @@ mod test { ); } - use analyzer::ast::Size; + use analyzer::Size; use Size::*; #[derive(Debug, PartialEq, Eq)] @@ -3123,15 +3049,16 @@ mod test { } fn annotations(text: &str) -> Vec { - let mut db = SourceDatabase::new(); + let mut db = ast::SourceDatabase::new(); let file = parse_inline(&mut db, "stdin", text.to_owned()).expect("parsing failure"); let file = analyzer::analyze(&file).expect("analyzer failure"); + let schema = analyzer::Schema::new(&file); file.declarations .iter() .map(|decl| Annotations { - size: decl.annot.size, - payload_size: decl.annot.payload_size, - fields: decl.fields().map(|field| field.annot.size).collect(), + size: schema.size(decl.key), + payload_size: schema.payload_size(decl.key), + fields: decl.fields().map(|field| schema.size(field.key)).collect(), }) .collect() } @@ -3626,8 +3553,8 @@ mod test { ); } - fn desugar(text: &str) -> analyzer::ast::File { - let mut db = SourceDatabase::new(); + fn desugar(text: &str) -> analyzer::File { + let mut db = ast::SourceDatabase::new(); let file = parse_inline(&mut db, "stdin", text.to_owned()).expect("parsing failure"); analyzer::analyze(&file).expect("analyzer failure") } diff --git a/pdl-compiler/src/ast.rs b/pdl-compiler/src/ast.rs index 20d8bb8..6c6f154 100644 --- a/pdl-compiler/src/ast.rs +++ b/pdl-compiler/src/ast.rs @@ -43,11 +43,6 @@ pub struct SourceRange { pub end: SourceLocation, } -pub trait Annotation: fmt::Debug + Serialize { - type FieldAnnotation: Default + fmt::Debug + Clone; - type DeclAnnotation: Default + fmt::Debug + Clone; -} - #[derive(Debug, Serialize, Clone)] #[serde(tag = "kind", rename = "comment")] pub struct Comment { @@ -154,10 +149,12 @@ pub enum FieldDesc { } #[derive(Debug, Serialize, Clone)] -pub struct Field { +pub struct Field { pub loc: SourceRange, + /// Unique identifier used to refer to the AST node in + /// compilation environments. #[serde(skip_serializing)] - pub annot: A::FieldAnnotation, + pub key: usize, #[serde(flatten)] pub desc: FieldDesc, pub cond: Option, @@ -172,7 +169,7 @@ pub struct TestCase { #[derive(Debug, Serialize, Clone, PartialEq, Eq)] #[serde(tag = "kind")] -pub enum DeclDesc { +pub enum DeclDesc { #[serde(rename = "checksum_declaration")] Checksum { id: String, function: String, width: usize }, #[serde(rename = "custom_field_declaration")] @@ -183,38 +180,42 @@ pub enum DeclDesc { Packet { id: String, constraints: Vec, - fields: Vec>, + fields: Vec, parent_id: Option, }, #[serde(rename = "struct_declaration")] Struct { id: String, constraints: Vec, - fields: Vec>, + fields: Vec, parent_id: Option, }, #[serde(rename = "group_declaration")] - Group { id: String, fields: Vec> }, + Group { id: String, fields: Vec }, #[serde(rename = "test_declaration")] Test { type_id: String, test_cases: Vec }, } #[derive(Debug, Serialize, Clone)] -pub struct Decl { +pub struct Decl { pub loc: SourceRange, + /// Unique identifier used to refer to the AST node in + /// compilation environments. 
#[serde(skip_serializing)] - pub annot: A::DeclAnnotation, + pub key: usize, #[serde(flatten)] - pub desc: DeclDesc, + pub desc: DeclDesc, } #[derive(Debug, Serialize, Clone)] -pub struct File { +pub struct File { pub version: String, pub file: FileId, pub comments: Vec, pub endianness: Endianness, - pub declarations: Vec>, + pub declarations: Vec, + #[serde(skip_serializing)] + pub max_key: usize, } impl SourceLocation { @@ -350,8 +351,8 @@ impl PartialEq for TestCase { } } -impl Eq for File {} -impl PartialEq for File { +impl Eq for File {} +impl PartialEq for File { fn eq(&self, other: &Self) -> bool { // Implement structural equality, leave out comments and PDL // version information. @@ -359,8 +360,8 @@ impl PartialEq for File { } } -impl File { - pub fn new(file: FileId) -> File { +impl File { + pub fn new(file: FileId) -> File { File { version: "1,0".to_owned(), comments: vec![], @@ -372,71 +373,27 @@ impl File { }, declarations: vec![], file, + max_key: 0, } } /// Iterate over the children of the selected declaration. /// /!\ This method is unsafe to use if the file contains cyclic /// declarations, use with caution. - pub fn iter_children<'d>(&'d self, decl: &'d Decl) -> impl Iterator> { + pub fn iter_children<'d>(&'d self, decl: &'d Decl) -> impl Iterator { self.declarations.iter().filter(|other_decl| other_decl.parent_id() == decl.id()) } } -impl Eq for Decl {} -impl PartialEq for Decl { +impl Eq for Decl {} +impl PartialEq for Decl { fn eq(&self, other: &Self) -> bool { - // Implement structural equality, leave out loc and annot. + // Implement structural equality, leave out loc and key. self.desc == other.desc } } -impl Decl { - pub fn new(loc: SourceRange, desc: DeclDesc) -> Decl { - Decl { loc, annot: Default::default(), desc } - } - - pub fn annotate( - &self, - annot: B::DeclAnnotation, - annotate_fields: F, - ) -> Decl - where - F: FnOnce(&[Field]) -> Vec>, - { - let desc = match &self.desc { - DeclDesc::Checksum { id, function, width } => { - DeclDesc::Checksum { id: id.clone(), function: function.clone(), width: *width } - } - DeclDesc::CustomField { id, width, function } => { - DeclDesc::CustomField { id: id.clone(), width: *width, function: function.clone() } - } - DeclDesc::Enum { id, tags, width } => { - DeclDesc::Enum { id: id.clone(), tags: tags.clone(), width: *width } - } - - DeclDesc::Test { type_id, test_cases } => { - DeclDesc::Test { type_id: type_id.clone(), test_cases: test_cases.clone() } - } - DeclDesc::Packet { id, constraints, parent_id, fields } => DeclDesc::Packet { - id: id.clone(), - constraints: constraints.clone(), - parent_id: parent_id.clone(), - fields: annotate_fields(fields), - }, - DeclDesc::Struct { id, constraints, parent_id, fields } => DeclDesc::Struct { - id: id.clone(), - constraints: constraints.clone(), - parent_id: parent_id.clone(), - fields: annotate_fields(fields), - }, - DeclDesc::Group { id, fields } => { - DeclDesc::Group { id: id.clone(), fields: annotate_fields(fields) } - } - }; - Decl { loc: self.loc, desc, annot } - } - +impl Decl { pub fn id(&self) -> Option<&str> { match &self.desc { DeclDesc::Test { .. } => None, @@ -467,7 +424,7 @@ impl Decl { } } - pub fn fields(&self) -> std::slice::Iter<'_, Field> { + pub fn fields(&self) -> std::slice::Iter<'_, Field> { match &self.desc { DeclDesc::Packet { fields, .. } | DeclDesc::Struct { fields, .. } @@ -478,14 +435,14 @@ impl Decl { /// Return the reference to the payload or body field in a declaration, /// if present. 
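// Both `_payload_` and `_body_` mark the variable part of a packet that
// child declarations refine; it is the presence of a matching
// `_size_(_payload_)` / `_size_(_body_)` field that makes the payload size
// `Dynamic` rather than `Unknown` in the analyzer.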
- pub fn payload(&self) -> Option<&Field> { + pub fn payload(&self) -> Option<&Field> { self.fields() .find(|field| matches!(&field.desc, FieldDesc::Payload { .. } | FieldDesc::Body { .. })) } /// Return the reference to the payload or body size field in a declaration, /// if present. - pub fn payload_size(&self) -> Option<&Field> { + pub fn payload_size(&self) -> Option<&Field> { self.fields().find(|field| match &field.desc { FieldDesc::Size { field_id, .. } => field_id == "_payload_" || field_id == "_body_", _ => false, @@ -494,7 +451,7 @@ impl Decl { /// Return the reference to the array size or count field in a declaration, /// if present. - pub fn array_size(&self, id: &str) -> Option<&Field> { + pub fn array_size(&self, id: &str) -> Option<&Field> { self.fields().find(|field| match &field.desc { FieldDesc::Size { field_id, .. } | FieldDesc::Count { field_id, .. } => field_id == id, _ => false, @@ -514,19 +471,15 @@ impl Decl { } } -impl Eq for Field {} -impl PartialEq for Field { +impl Eq for Field {} +impl PartialEq for Field { fn eq(&self, other: &Self) -> bool { // Implement structural equality, leave out loc and annot. self.desc == other.desc } } -impl Field { - pub fn annotate(&self, annot: B::FieldAnnotation) -> Field { - Field { loc: self.loc, annot, cond: self.cond.clone(), desc: self.desc.clone() } - } - +impl Field { pub fn id(&self) -> Option<&str> { match &self.desc { FieldDesc::Checksum { .. } diff --git a/pdl-compiler/src/backends/intermediate.rs b/pdl-compiler/src/backends/intermediate.rs index 4c92ae0..cdae0f1 100644 --- a/pdl-compiler/src/backends/intermediate.rs +++ b/pdl-compiler/src/backends/intermediate.rs @@ -15,7 +15,6 @@ use std::collections::{btree_map::Entry, BTreeMap, HashMap}; use crate::ast; -use crate::parser; pub struct Schema<'a> { pub packets_and_structs: HashMap<&'a str, PacketOrStruct<'a>>, @@ -98,7 +97,7 @@ pub enum ComputedOffset<'a> { Alias(ComputedOffsetId<'a>), } -pub fn generate(file: &parser::ast::File) -> Result { +pub fn generate(file: &ast::File) -> Result { let mut schema = Schema { packets_and_structs: HashMap::new(), enums: HashMap::new() }; match file.endianness.value { ast::EndiannessValue::LittleEndian => {} @@ -112,7 +111,7 @@ pub fn generate(file: &parser::ast::File) -> Result { Ok(schema) } -fn process_decl<'a>(schema: &mut Schema<'a>, decl: &'a parser::ast::Decl) { +fn process_decl<'a>(schema: &mut Schema<'a>, decl: &'a ast::Decl) { match &decl.desc { ast::DeclDesc::Enum { id, tags, width, .. } => process_enum(schema, id, tags, *width), ast::DeclDesc::Packet { id, fields, .. } | ast::DeclDesc::Struct { id, fields, .. 
} => { @@ -135,18 +134,11 @@ fn process_enum<'a>(schema: &mut Schema<'a>, id: &'a str, tags: &'a [ast::Tag], ); } -fn process_packet_or_struct<'a>( - schema: &mut Schema<'a>, - id: &'a str, - fields: &'a [parser::ast::Field], -) { +fn process_packet_or_struct<'a>(schema: &mut Schema<'a>, id: &'a str, fields: &'a [ast::Field]) { schema.packets_and_structs.insert(id, compute_getters(schema, fields)); } -fn compute_getters<'a>( - schema: &Schema<'a>, - fields: &'a [parser::ast::Field], -) -> PacketOrStruct<'a> { +fn compute_getters<'a>(schema: &Schema<'a>, fields: &'a [ast::Field]) -> PacketOrStruct<'a> { let mut prev_pos_id = None; let mut curr_pos_id = ComputedOffsetId::HeaderStart; let mut computed_values = BTreeMap::new(); diff --git a/pdl-compiler/src/backends/json.rs b/pdl-compiler/src/backends/json.rs index 526b29e..7bd6e4f 100644 --- a/pdl-compiler/src/backends/json.rs +++ b/pdl-compiler/src/backends/json.rs @@ -14,10 +14,10 @@ //! Json compiler backend. -use crate::parser; +use crate::ast; /// Turn the AST into a JSON representation. -pub fn generate(file: &parser::ast::File) -> Result { +pub fn generate(file: &ast::File) -> Result { serde_json::to_string_pretty(&file) .map_err(|err| format!("could not JSON serialize grammar: {err}")) } diff --git a/pdl-compiler/src/backends/rust.rs b/pdl-compiler/src/backends/rust.rs index 117d90d..9e123e1 100644 --- a/pdl-compiler/src/backends/rust.rs +++ b/pdl-compiler/src/backends/rust.rs @@ -21,8 +21,6 @@ use std::collections::HashMap; use std::path::Path; use syn::LitInt; -use crate::analyzer::ast as analyzer_ast; - mod parser; mod preamble; mod serializer; @@ -86,14 +84,15 @@ pub fn mask_bits(n: usize, suffix: &str) -> syn::LitInt { fn generate_packet_size_getter<'a>( scope: &analyzer::Scope<'a>, - fields: impl Iterator, + schema: &analyzer::Schema, + fields: impl Iterator, is_packet: bool, ) -> (usize, proc_macro2::TokenStream) { let mut constant_width = 0; let mut dynamic_widths = Vec::new(); for field in fields { - if let Some(width) = field.annot.static_() { + if let Some(width) = schema.size(field.key).static_() { constant_width += width; continue; } @@ -142,7 +141,7 @@ fn generate_packet_size_getter<'a>( ast::FieldDesc::Array { id, width, .. } => { let id = id.to_ident(); match &decl { - Some(analyzer_ast::Decl { + Some(ast::Decl { desc: ast::DeclDesc::Struct { .. } | ast::DeclDesc::CustomField { .. }, .. }) => { @@ -150,9 +149,7 @@ fn generate_packet_size_getter<'a>( self.#id.iter().map(|elem| elem.get_size()).sum::() } } - Some(analyzer_ast::Decl { - desc: ast::DeclDesc::Enum { width, .. }, .. - }) => { + Some(ast::Decl { desc: ast::DeclDesc::Enum { width, .. }, .. }) => { let width = syn::Index::from(width / 8); let mul_width = (width.index > 1).then(|| quote!(* #width)); quote! { @@ -188,10 +185,7 @@ fn generate_packet_size_getter<'a>( ) } -fn top_level_packet<'a>( - scope: &analyzer::Scope<'a>, - packet_name: &'a str, -) -> &'a analyzer_ast::Decl { +fn top_level_packet<'a>(scope: &analyzer::Scope<'a>, packet_name: &'a str) -> &'a ast::Decl { let mut decl = scope.typedef[packet_name]; while let ast::DeclDesc::Packet { parent_id: Some(parent_id), .. } | ast::DeclDesc::Struct { parent_id: Some(parent_id), .. 
} = &decl.desc @@ -209,8 +203,8 @@ fn top_level_packet<'a>( fn find_constrained_parent_fields<'a>( scope: &analyzer::Scope<'a>, id: &str, -) -> Vec<&'a analyzer_ast::Field> { - let all_parent_fields: HashMap = HashMap::from_iter( +) -> Vec<&'a ast::Field> { + let all_parent_fields: HashMap = HashMap::from_iter( scope .iter_parent_fields(scope.typedef[id]) .filter_map(|f| f.id().map(|id| (id.to_string(), f))), @@ -244,6 +238,7 @@ fn find_constrained_parent_fields<'a>( /// how to parse and serialize its own fields. fn generate_data_struct( scope: &analyzer::Scope<'_>, + schema: &analyzer::Schema, endianness: ast::EndiannessValue, id: &str, ) -> (proc_macro2::TokenStream, proc_macro2::TokenStream) { @@ -252,8 +247,9 @@ fn generate_data_struct( let span = format_ident!("bytes"); let serializer_span = format_ident!("buffer"); - let mut field_parser = FieldParser::new(scope, endianness, id, &span); - let mut field_serializer = FieldSerializer::new(scope, endianness, id, &serializer_span); + let mut field_parser = FieldParser::new(scope, schema, endianness, id, &span); + let mut field_serializer = + FieldSerializer::new(scope, schema, endianness, id, &serializer_span); for field in decl.fields() { field_parser.add(field); field_serializer.add(field); @@ -270,7 +266,7 @@ fn generate_data_struct( }; let (constant_width, packet_size) = - generate_packet_size_getter(scope, decl.fields(), is_packet); + generate_packet_size_getter(scope, schema, decl.fields(), is_packet); let conforms = if constant_width == 0 { quote! { true } } else { @@ -356,7 +352,7 @@ fn generate_data_struct( /// Turn the constraint into a value (such as `10` or /// `SomeEnum::Foo`). pub fn constraint_to_value( - all_fields: &HashMap, + all_fields: &HashMap, constraint: &ast::Constraint, ) -> proc_macro2::TokenStream { match constraint { @@ -381,6 +377,7 @@ pub fn constraint_to_value( /// Generate code for a `ast::Decl::Packet`. fn generate_packet_decl( scope: &analyzer::Scope<'_>, + schema: &analyzer::Schema, endianness: ast::EndiannessValue, id: &str, ) -> proc_macro2::TokenStream { @@ -596,7 +593,7 @@ fn generate_packet_decl( } }); - let (data_struct_decl, data_struct_impl) = generate_data_struct(scope, endianness, id); + let (data_struct_decl, data_struct_impl) = generate_data_struct(scope, schema, endianness, id); quote! { #child_declaration @@ -709,10 +706,11 @@ fn generate_packet_decl( /// Generate code for a `ast::Decl::Struct`. fn generate_struct_decl( scope: &analyzer::Scope<'_>, + schema: &analyzer::Schema, endianness: ast::EndiannessValue, id: &str, ) -> proc_macro2::TokenStream { - let (struct_decl, struct_impl) = generate_data_struct(scope, endianness, id); + let (struct_decl, struct_impl) = generate_data_struct(scope, schema, endianness, id); quote! { #struct_decl #struct_impl @@ -996,18 +994,21 @@ fn generate_custom_field_decl(id: &str, width: usize) -> proc_macro2::TokenStrea fn generate_decl( scope: &analyzer::Scope<'_>, - file: &analyzer_ast::File, - decl: &analyzer_ast::Decl, + schema: &analyzer::Schema, + file: &ast::File, + decl: &ast::Decl, ) -> proc_macro2::TokenStream { match &decl.desc { - ast::DeclDesc::Packet { id, .. } => generate_packet_decl(scope, file.endianness.value, id), + ast::DeclDesc::Packet { id, .. } => { + generate_packet_decl(scope, schema, file.endianness.value, id) + } ast::DeclDesc::Struct { id, parent_id: None, .. } => { // TODO(mgeisler): handle structs with parents. 
We could // generate code for them, but the code is not useful // since it would require the caller to unpack everything // manually. We either need to change the API, or // implement the recursive (de)serialization. - generate_struct_decl(scope, file.endianness.value, id) + generate_struct_decl(scope, schema, file.endianness.value, id) } ast::DeclDesc::Enum { id, tags, width } => generate_enum_decl(id, tags, *width), ast::DeclDesc::CustomField { id, width: Some(width), .. } => { @@ -1023,13 +1024,14 @@ fn generate_decl( /// readable source code. pub fn generate_tokens( sources: &ast::SourceDatabase, - file: &analyzer_ast::File, + file: &ast::File, ) -> proc_macro2::TokenStream { let source = sources.get(file.file).expect("could not read source"); let preamble = preamble::generate(Path::new(source.name())); let scope = analyzer::Scope::new(file).expect("could not create scope"); - let decls = file.declarations.iter().map(|decl| generate_decl(&scope, file, decl)); + let schema = analyzer::Schema::new(file); + let decls = file.declarations.iter().map(|decl| generate_decl(&scope, &schema, file, decl)); quote! { #preamble @@ -1041,7 +1043,7 @@ pub fn generate_tokens( /// /// The code is not formatted, pipe it through `rustfmt` to get /// readable source code. -pub fn generate(sources: &ast::SourceDatabase, file: &analyzer_ast::File) -> String { +pub fn generate(sources: &ast::SourceDatabase, file: &ast::File) -> String { let syntax_tree = syn::parse2(generate_tokens(sources, file)).expect("Could not parse code"); prettyplease::unparse(&syntax_tree) } @@ -1061,7 +1063,7 @@ mod tests { /// # Panics /// /// Panics on parse errors. - pub fn parse_str(text: &str) -> analyzer_ast::File { + pub fn parse_str(text: &str) -> ast::File { let mut db = ast::SourceDatabase::new(); let file = parse_inline(&mut db, "stdin", String::from(text)).expect("parse error"); analyzer::analyze(&file).expect("analyzer error") diff --git a/pdl-compiler/src/backends/rust/parser.rs b/pdl-compiler/src/backends/rust/parser.rs index a549341..491ce10 100644 --- a/pdl-compiler/src/backends/rust/parser.rs +++ b/pdl-compiler/src/backends/rust/parser.rs @@ -12,7 +12,6 @@ // See the License for the specific language governing permissions and // limitations under the License. -use crate::analyzer::ast as analyzer_ast; use crate::backends::rust::{ constraint_to_value, find_constrained_parent_fields, mask_bits, types, ToIdent, ToUpperCamelCase, @@ -28,13 +27,14 @@ fn size_field_ident(id: &str) -> proc_macro2::Ident { /// A single bit-field. struct BitField<'a> { shift: usize, // The shift to apply to this field. 
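// Bit-fields are buffered in `chunk` below with `shift` tracking each
// field's bit offset; once the accumulated width reaches a byte boundary,
// the whole chunk is read as a single integer and the individual fields are
// recovered by shifting and masking.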
- field: &'a analyzer_ast::Field, + field: &'a ast::Field, } pub struct FieldParser<'a> { scope: &'a analyzer::Scope<'a>, + schema: &'a analyzer::Schema, endianness: ast::EndiannessValue, - decl: &'a analyzer_ast::Decl, + decl: &'a ast::Decl, packet_name: &'a str, span: &'a proc_macro2::Ident, chunk: Vec>, @@ -46,12 +46,14 @@ pub struct FieldParser<'a> { impl<'a> FieldParser<'a> { pub fn new( scope: &'a analyzer::Scope<'a>, + schema: &'a analyzer::Schema, endianness: ast::EndiannessValue, packet_name: &'a str, span: &'a proc_macro2::Ident, ) -> FieldParser<'a> { FieldParser { scope, + schema, endianness, decl: scope.typedef[packet_name], packet_name, @@ -63,7 +65,7 @@ impl<'a> FieldParser<'a> { } } - pub fn add(&mut self, field: &'a analyzer_ast::Field) { + pub fn add(&mut self, field: &'a ast::Field) { match &field.desc { _ if field.cond.is_some() => self.add_optional_field(field), _ if self.scope.is_bitfield(field) => self.add_bit_field(field), @@ -73,7 +75,7 @@ impl<'a> FieldParser<'a> { *width, type_id.as_deref(), *size, - field.annot.padded_size, + self.schema.padded_size(field.key), self.scope.get_type_declaration(field), ), ast::FieldDesc::Typedef { id, type_id } => self.add_typedef_field(id, type_id), @@ -85,7 +87,7 @@ impl<'a> FieldParser<'a> { } } - fn add_optional_field(&mut self, field: &'a analyzer_ast::Field) { + fn add_optional_field(&mut self, field: &'a ast::Field) { let cond_id = field.cond.as_ref().unwrap().id.to_ident(); let cond_value = syn::parse_str::(&format!( "{}", @@ -139,9 +141,9 @@ impl<'a> FieldParser<'a> { }) } - fn add_bit_field(&mut self, field: &'a analyzer_ast::Field) { + fn add_bit_field(&mut self, field: &'a ast::Field) { self.chunk.push(BitField { shift: self.shift, field }); - self.shift += field.annot.size.static_().unwrap(); + self.shift += self.schema.size(field.key).static_().unwrap(); if self.shift % 8 != 0 { return; } @@ -182,7 +184,7 @@ impl<'a> FieldParser<'a> { v = quote! { (#v >> #shift) } } - let width = field.annot.size.static_().unwrap(); + let width = self.schema.size(field.key).static_().unwrap(); let value_type = types::Integer::new(width); if !single_value && width < value_type.width { // Mask value if we grabbed more than `width` and if @@ -300,7 +302,7 @@ impl<'a> FieldParser<'a> { let mut offset = 0; for field in fields { - if let Some(width) = field.annot.static_() { + if let Some(width) = self.schema.size(field.key).static_() { offset += width; } else { return None; @@ -335,19 +337,20 @@ impl<'a> FieldParser<'a> { // known). If None, the array is a Vec with a dynamic size. size: Option, padding_size: Option, - decl: Option<&analyzer_ast::Decl>, + decl: Option<&ast::Decl>, ) { enum ElementWidth { Static(usize), // Static size in bytes. 
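        // The element width is static when the array declares a scalar width
        // (e.g. `a : 16[4]` has 2-byte elements) or when the element type has
        // a statically known total size; it is asserted to be byte-aligned.
        // Otherwise elements are parsed one at a time from the remaining span.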
Unknown, } - let element_width = match width.or_else(|| decl.unwrap().annot.total_size().static_()) { - Some(w) => { - assert_eq!(w % 8, 0, "Array element size ({w}) is not a multiple of 8"); - ElementWidth::Static(w / 8) - } - None => ElementWidth::Unknown, - }; + let element_width = + match width.or_else(|| self.schema.total_size(decl.unwrap().key).static_()) { + Some(w) => { + assert_eq!(w % 8, 0, "Array element size ({w}) is not a multiple of 8"); + ElementWidth::Static(w / 8) + } + None => ElementWidth::Unknown, + }; // The "shape" of the array, i.e., the number of elements // given via a static count, a count field, a size field, or @@ -528,11 +531,11 @@ impl<'a> FieldParser<'a> { let id = id.to_ident(); let type_id = type_id.to_ident(); - self.code.push(match decl.annot.size { - analyzer_ast::Size::Unknown | analyzer_ast::Size::Dynamic => quote! { + self.code.push(match self.schema.size(decl.key) { + analyzer::Size::Unknown | analyzer::Size::Dynamic => quote! { let #id = #type_id::parse_inner(&mut #span)?; }, - analyzer_ast::Size::Static(width) => { + analyzer::Size::Static(width) => { assert_eq!(width % 8, 0, "Typedef field type size is not a multiple of 8"); match &decl.desc { ast::DeclDesc::Checksum { .. } => todo!(), @@ -641,7 +644,7 @@ impl<'a> FieldParser<'a> { span: &proc_macro2::Ident, width: Option, type_id: Option<&str>, - decl: Option<&analyzer_ast::Decl>, + decl: Option<&ast::Decl>, ) -> proc_macro2::TokenStream { if let Some(width) = width { let get_uint = types::get_uint(self.endianness, width, span); @@ -794,7 +797,7 @@ mod tests { /// # Panics /// /// Panics on parse errors. - pub fn parse_str(text: &str) -> analyzer_ast::File { + pub fn parse_str(text: &str) -> ast::File { let mut db = ast::SourceDatabase::new(); let file = parse_inline(&mut db, "stdin", String::from(text)).expect("parse error"); analyzer::analyze(&file).expect("analyzer error") @@ -810,8 +813,9 @@ mod tests { "; let file = parse_str(code); let scope = analyzer::Scope::new(&file).unwrap(); + let schema = analyzer::Schema::new(&file); let span = format_ident!("bytes"); - let parser = FieldParser::new(&scope, file.endianness.value, "P", &span); + let parser = FieldParser::new(&scope, &schema, file.endianness.value, "P", &span); assert_eq!(parser.find_size_field("a"), None); assert_eq!(parser.find_count_field("a"), None); } @@ -827,8 +831,9 @@ mod tests { "; let file = parse_str(code); let scope = analyzer::Scope::new(&file).unwrap(); + let schema = analyzer::Schema::new(&file); let span = format_ident!("bytes"); - let parser = FieldParser::new(&scope, file.endianness.value, "P", &span); + let parser = FieldParser::new(&scope, &schema, file.endianness.value, "P", &span); assert_eq!(parser.find_size_field("b"), None); assert_eq!(parser.find_count_field("b"), Some(format_ident!("b_count"))); } @@ -844,8 +849,9 @@ mod tests { "; let file = parse_str(code); let scope = analyzer::Scope::new(&file).unwrap(); + let schema = analyzer::Schema::new(&file); let span = format_ident!("bytes"); - let parser = FieldParser::new(&scope, file.endianness.value, "P", &span); + let parser = FieldParser::new(&scope, &schema, file.endianness.value, "P", &span); assert_eq!(parser.find_size_field("c"), Some(format_ident!("c_size"))); assert_eq!(parser.find_count_field("c"), None); } diff --git a/pdl-compiler/src/backends/rust/serializer.rs b/pdl-compiler/src/backends/rust/serializer.rs index 7290701..6989eae 100644 --- a/pdl-compiler/src/backends/rust/serializer.rs +++ b/pdl-compiler/src/backends/rust/serializer.rs @@ -12,7 
+12,6 @@ // See the License for the specific language governing permissions and // limitations under the License. -use crate::analyzer::ast as analyzer_ast; use crate::backends::rust::{mask_bits, types, ToIdent, ToUpperCamelCase}; use crate::{analyzer, ast}; use quote::{format_ident, quote}; @@ -26,6 +25,7 @@ struct BitField { pub struct FieldSerializer<'a> { scope: &'a analyzer::Scope<'a>, + schema: &'a analyzer::Schema, endianness: ast::EndiannessValue, packet_name: &'a str, span: &'a proc_macro2::Ident, @@ -37,12 +37,14 @@ pub struct FieldSerializer<'a> { impl<'a> FieldSerializer<'a> { pub fn new( scope: &'a analyzer::Scope<'a>, + schema: &'a analyzer::Schema, endianness: ast::EndiannessValue, packet_name: &'a str, span: &'a proc_macro2::Ident, ) -> FieldSerializer<'a> { FieldSerializer { scope, + schema, endianness, packet_name, span, @@ -52,14 +54,14 @@ impl<'a> FieldSerializer<'a> { } } - pub fn add(&mut self, field: &analyzer_ast::Field) { + pub fn add(&mut self, field: &ast::Field) { match &field.desc { _ if field.cond.is_some() => self.add_optional_field(field), _ if self.scope.is_bitfield(field) => self.add_bit_field(field), ast::FieldDesc::Array { id, width, .. } => self.add_array_field( id, *width, - field.annot.padded_size, + self.schema.padded_size(field.key), self.scope.get_type_declaration(field), ), ast::FieldDesc::Typedef { id, type_id } => { @@ -74,7 +76,7 @@ impl<'a> FieldSerializer<'a> { } } - fn add_optional_field(&mut self, field: &analyzer_ast::Field) { + fn add_optional_field(&mut self, field: &ast::Field) { self.code.push(match &field.desc { ast::FieldDesc::Scalar { id, width } => { let name = id; @@ -136,8 +138,8 @@ impl<'a> FieldSerializer<'a> { }) } - fn add_bit_field(&mut self, field: &analyzer_ast::Field) { - let width = field.annot.size.static_().unwrap(); + fn add_bit_field(&mut self, field: &ast::Field) { + let width = self.schema.size(field.key).static_().unwrap(); let shift = self.shift; match &field.desc { @@ -370,7 +372,7 @@ impl<'a> FieldSerializer<'a> { id: &str, width: Option, padding_size: Option, - decl: Option<&analyzer_ast::Decl>, + decl: Option<&ast::Decl>, ) { let span = format_ident!("{}", self.span); let serialize = match width { @@ -398,29 +400,24 @@ impl<'a> FieldSerializer<'a> { let packet_name = self.packet_name; let name = id; let id = id.to_ident(); - self.code.push(if let Some(padding_size) = padding_size { - let array_size = match (&width, decl) { - (Some(width), _) - | ( - _, - Some(analyzer_ast::Decl { - annot: - analyzer_ast::DeclAnnotation { - size: analyzer_ast::Size::Static(width), .. - }, - .. - }), - ) => { - let element_size = proc_macro2::Literal::usize_unsuffixed(width / 8); + + if let Some(padding_size) = padding_size { + let element_width = match &width { + Some(width) => Some(*width), + None => self.schema.size(decl.unwrap().key).static_(), + }; + + let array_size = match element_width { + Some(element_width) => { + let element_size = proc_macro2::Literal::usize_unsuffixed(element_width / 8); quote! { self.#id.len() * #element_size } } - (_, Some(_)) => { + _ => { quote! { self.#id.iter().fold(0, |size, elem| size + elem.get_size()) } } - _ => unreachable!(), }; - quote! { + self.code.push(quote! { let array_size = #array_size; if array_size > #padding_size { return Err(EncodeError::SizeOverflow { @@ -434,14 +431,14 @@ impl<'a> FieldSerializer<'a> { #serialize; } #span.put_bytes(0, #padding_size - array_size); - } + }); } else { - quote! { + self.code.push(quote! 
{ for elem in &self.#id { #serialize; } - } - }); + }); + } } fn add_typedef_field(&mut self, id: &str, type_id: &str) { diff --git a/pdl-compiler/src/backends/rust/types.rs b/pdl-compiler/src/backends/rust/types.rs index 799cd4e..ae6754d 100644 --- a/pdl-compiler/src/backends/rust/types.rs +++ b/pdl-compiler/src/backends/rust/types.rs @@ -14,7 +14,6 @@ //! Utility functions for dealing with Rust integer types. -use crate::analyzer::ast as analyzer_ast; use crate::backends::rust::ToIdent; use crate::{analyzer, ast}; use quote::{format_ident, quote}; @@ -49,7 +48,7 @@ impl quote::ToTokens for Integer { } } -pub fn rust_type(field: &analyzer_ast::Field) -> proc_macro2::TokenStream { +pub fn rust_type(field: &ast::Field) -> proc_macro2::TokenStream { match &field.desc { ast::FieldDesc::Scalar { width, .. } if field.cond.is_some() => { let field_type = Integer::new(*width); @@ -90,10 +89,7 @@ pub fn rust_type(field: &analyzer_ast::Field) -> proc_macro2::TokenStream { } } -pub fn rust_borrow( - field: &analyzer_ast::Field, - scope: &analyzer::Scope<'_>, -) -> proc_macro2::TokenStream { +pub fn rust_borrow(field: &ast::Field, scope: &analyzer::Scope<'_>) -> proc_macro2::TokenStream { match &field.desc { ast::FieldDesc::Scalar { .. } => quote!(), ast::FieldDesc::Typedef { type_id, .. } => match &scope.typedef[type_id].desc { diff --git a/pdl-compiler/src/backends/rust_no_allocation/mod.rs b/pdl-compiler/src/backends/rust_no_allocation/mod.rs index c98bc12..8bf7e57 100644 --- a/pdl-compiler/src/backends/rust_no_allocation/mod.rs +++ b/pdl-compiler/src/backends/rust_no_allocation/mod.rs @@ -37,7 +37,6 @@ use proc_macro2::TokenStream; use quote::quote; use crate::ast; -use crate::parser; use self::{ enums::generate_enum, packet_parser::generate_packet, @@ -46,7 +45,7 @@ use self::{ use super::intermediate::Schema; -pub fn generate(file: &parser::ast::File, schema: &Schema) -> Result { +pub fn generate(file: &ast::File, schema: &Schema) -> Result { match file.endianness.value { ast::EndiannessValue::LittleEndian => {} _ => unimplemented!("Only little_endian endianness supported"), @@ -80,7 +79,7 @@ pub fn generate(file: &parser::ast::File, schema: &Schema) -> Result>, ) -> Result { diff --git a/pdl-compiler/src/backends/rust_no_allocation/packet_parser.rs b/pdl-compiler/src/backends/rust_no_allocation/packet_parser.rs index bd07630..1f9fc0f 100644 --- a/pdl-compiler/src/backends/rust_no_allocation/packet_parser.rs +++ b/pdl-compiler/src/backends/rust_no_allocation/packet_parser.rs @@ -18,7 +18,6 @@ use proc_macro2::TokenStream; use quote::{format_ident, quote}; use crate::ast; -use crate::parser; use crate::backends::intermediate::{ ComputedOffsetId, ComputedValueId, PacketOrStruct, PacketOrStructLength, Schema, @@ -29,7 +28,7 @@ use super::utils::get_integer_type; pub fn generate_packet( id: &str, - fields: &[parser::ast::Field], + fields: &[ast::Field], parent_id: Option<&str>, schema: &Schema, curr_schema: &PacketOrStruct, diff --git a/pdl-compiler/src/backends/rust_no_allocation/packet_serializer.rs b/pdl-compiler/src/backends/rust_no_allocation/packet_serializer.rs index c088b75..7b06944 100644 --- a/pdl-compiler/src/backends/rust_no_allocation/packet_serializer.rs +++ b/pdl-compiler/src/backends/rust_no_allocation/packet_serializer.rs @@ -23,7 +23,6 @@ use crate::{ intermediate::{ComputedValue, ComputedValueId, PacketOrStruct, Schema}, rust_no_allocation::utils::get_integer_type, }, - parser, }; fn standardize_child(id: &str) -> &str { @@ -36,7 +35,7 @@ fn standardize_child(id: &str) -> &str { 
diff --git a/pdl-compiler/src/backends/rust_no_allocation/packet_serializer.rs b/pdl-compiler/src/backends/rust_no_allocation/packet_serializer.rs
index c088b75..7b06944 100644
--- a/pdl-compiler/src/backends/rust_no_allocation/packet_serializer.rs
+++ b/pdl-compiler/src/backends/rust_no_allocation/packet_serializer.rs
@@ -23,7 +23,6 @@ use crate::{
         intermediate::{ComputedValue, ComputedValueId, PacketOrStruct, Schema},
         rust_no_allocation::utils::get_integer_type,
     },
-    parser,
 };
 
 fn standardize_child(id: &str) -> &str {
@@ -36,7 +35,7 @@
 pub fn generate_packet_serializer(
     id: &str,
     parent_id: Option<&str>,
-    fields: &[parser::ast::Field],
+    fields: &[ast::Field],
     schema: &Schema,
     curr_schema: &PacketOrStruct,
     children: &HashMap<&str, Vec<&str>>,

diff --git a/pdl-compiler/src/main.rs b/pdl-compiler/src/main.rs
index 8992d29..bf2bbf3 100644
--- a/pdl-compiler/src/main.rs
+++ b/pdl-compiler/src/main.rs
@@ -64,10 +64,7 @@ struct Opt {
 }
 
 /// Remove declarations listed in the input filter.
-fn filter_declarations(
-    file: parser::ast::File,
-    exclude_declarations: &[String],
-) -> parser::ast::File {
+fn filter_declarations(file: ast::File, exclude_declarations: &[String]) -> ast::File {
     ast::File {
         declarations: file
             .declarations

diff --git a/pdl-compiler/src/parser.rs b/pdl-compiler/src/parser.rs
index 29407ff..6df417b 100644
--- a/pdl-compiler/src/parser.rs
+++ b/pdl-compiler/src/parser.rs
@@ -12,28 +12,13 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+use crate::ast;
 use codespan_reporting::diagnostic::Diagnostic;
 use codespan_reporting::files;
 use pest::iterators::{Pair, Pairs};
 use pest::{Parser, Token};
 use std::iter::{Filter, Peekable};
 
-pub mod ast {
-    use serde::Serialize;
-
-    #[derive(Debug, Serialize, Clone, Default, PartialEq, Eq)]
-    pub struct Annotation;
-
-    impl crate::ast::Annotation for Annotation {
-        type FieldAnnotation = ();
-        type DeclAnnotation = ();
-    }
-
-    pub type Field = crate::ast::Field;
-    pub type Decl = crate::ast::Decl;
-    pub type File = crate::ast::File;
-}
-
 // Generate the PDL parser.
 //
 // TODO:
@@ -191,26 +176,32 @@ pub struct PDLParser;
 
 type Node<'i> = Pair<'i, Rule>;
 type NodeIterator<'i> = Peekable<Filter<Pairs<'i, Rule>, fn(&Node<'i>) -> bool>>;
-type Context<'a> = (crate::ast::FileId, &'a Vec<usize>);
+struct Context<'a>(ast::FileId, &'a Vec<usize>, std::cell::Cell<usize>);
 
 trait Helpers<'i> {
     fn children(self) -> NodeIterator<'i>;
-    fn as_loc(&self, context: &Context) -> crate::ast::SourceRange;
+    fn as_loc(&self, context: &Context) -> ast::SourceRange;
     fn as_string(&self) -> String;
     fn as_usize(&self) -> Result<usize, String>;
 }
 
+impl<'a> Context<'a> {
+    fn key(&self) -> usize {
+        self.2.replace(self.2.get() + 1)
+    }
+}
+
 impl<'i> Helpers<'i> for Node<'i> {
     fn children(self) -> NodeIterator<'i> {
         self.into_inner().filter((|n| n.as_rule() != Rule::COMMENT) as fn(&Self) -> bool).peekable()
     }
 
-    fn as_loc(&self, context: &Context) -> crate::ast::SourceRange {
+    fn as_loc(&self, context: &Context) -> ast::SourceRange {
         let span = self.as_span();
-        crate::ast::SourceRange {
+        ast::SourceRange {
             file: context.0,
-            start: crate::ast::SourceLocation::new(span.start_pos().pos(), context.1),
-            end: crate::ast::SourceLocation::new(span.end_pos().pos(), context.1),
+            start: ast::SourceLocation::new(span.start_pos().pos(), context.1),
+            end: ast::SourceLocation::new(span.end_pos().pos(), context.1),
         }
     }
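`Context` is now a tuple struct carrying a `std::cell::Cell<usize>` so that `key()` can hand out unique, monotonically increasing keys through a shared reference. A minimal standalone sketch of the pattern (`KeyGen` is a hypothetical name):

```rust
use std::cell::Cell;

// Cell::replace stores the incremented counter and returns the previous
// value, so successive calls yield 0, 1, 2, ... without needing &mut.
struct KeyGen(Cell<usize>);

impl KeyGen {
    fn key(&self) -> usize {
        self.0.replace(self.0.get() + 1)
    }
}

fn main() {
    let keys = KeyGen(Cell::new(0));
    assert_eq!(keys.key(), 0);
    assert_eq!(keys.key(), 1);
    assert_eq!(keys.key(), 2);
}
```

`Cell` suffices here because only a `Copy` value is stored; it avoids threading `&mut Context` through every `parse_*` helper.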
@@ -297,22 +288,22 @@ fn parse_size_modifier_opt(iter: &mut NodeIterator<'_>) -> Option<String> {
     maybe(iter, Rule::size_modifier).map(|n| n.as_string())
 }
 
-fn parse_endianness(node: Node<'_>, context: &Context) -> Result<crate::ast::Endianness, String> {
+fn parse_endianness(node: Node<'_>, context: &Context) -> Result<ast::Endianness, String> {
     if node.as_rule() != Rule::endianness_declaration {
         err_unexpected_rule(Rule::endianness_declaration, node.as_rule())
     } else {
-        Ok(crate::ast::Endianness {
+        Ok(ast::Endianness {
             loc: node.as_loc(context),
             value: match node.as_str().trim() {
-                "little_endian_packets" => crate::ast::EndiannessValue::LittleEndian,
-                "big_endian_packets" => crate::ast::EndiannessValue::BigEndian,
+                "little_endian_packets" => ast::EndiannessValue::LittleEndian,
+                "big_endian_packets" => ast::EndiannessValue::BigEndian,
                 _ => unreachable!(),
             },
         })
     }
 }
 
-fn parse_constraint(node: Node<'_>, context: &Context) -> Result<crate::ast::Constraint, String> {
+fn parse_constraint(node: Node<'_>, context: &Context) -> Result<ast::Constraint, String> {
     if node.as_rule() != Rule::constraint {
         err_unexpected_rule(Rule::constraint, node.as_rule())
     } else {
@@ -320,19 +311,19 @@ fn parse_constraint(node: Node<'_>, context: &Context) -> Result<crate::ast::Co
 fn parse_constraint_list_opt(
     iter: &mut NodeIterator<'_>,
     context: &Context,
-) -> Result<Vec<crate::ast::Constraint>, String> {
+) -> Result<Vec<ast::Constraint>, String> {
     maybe(iter, Rule::constraint_list)
         .map_or(Ok(vec![]), |n| n.children().map(|n| parse_constraint(n, context)).collect())
 }
 
-fn parse_enum_value(node: Node<'_>, context: &Context) -> Result<crate::ast::TagValue, String> {
+fn parse_enum_value(node: Node<'_>, context: &Context) -> Result<ast::TagValue, String> {
     if node.as_rule() != Rule::enum_value {
         err_unexpected_rule(Rule::enum_value, node.as_rule())
     } else {
@@ -340,19 +331,19 @@ fn parse_enum_value(node: Node<'_>, context: &Context) -> Result<crate::ast::Ta
 fn parse_enum_value_list_opt(
     iter: &mut NodeIterator<'_>,
     context: &Context,
-) -> Result<Vec<crate::ast::TagValue>, String> {
+) -> Result<Vec<ast::TagValue>, String> {
     maybe(iter, Rule::enum_value_list)
         .map_or(Ok(vec![]), |n| n.children().map(|n| parse_enum_value(n, context)).collect())
 }
 
-fn parse_enum_range(node: Node<'_>, context: &Context) -> Result<crate::ast::TagRange, String> {
+fn parse_enum_range(node: Node<'_>, context: &Context) -> Result<ast::TagRange, String> {
     if node.as_rule() != Rule::enum_range {
         err_unexpected_rule(Rule::enum_range, node.as_rule())
     } else {
@@ -362,34 +353,34 @@ fn parse_enum_range(node: Node<'_>, context: &Context) -> Result<crate::ast::Ta
-fn parse_enum_other(node: Node<'_>, context: &Context) -> Result<crate::ast::TagOther, String> {
+fn parse_enum_other(node: Node<'_>, context: &Context) -> Result<ast::TagOther, String> {
     if node.as_rule() != Rule::enum_other {
         err_unexpected_rule(Rule::enum_other, node.as_rule())
     } else {
         let loc = node.as_loc(context);
         let mut children = node.children();
         let id = parse_identifier(&mut children)?;
-        Ok(crate::ast::TagOther { id, loc })
+        Ok(ast::TagOther { id, loc })
     }
 }
 
-fn parse_enum_tag(node: Node<'_>, context: &Context) -> Result<crate::ast::Tag, String> {
+fn parse_enum_tag(node: Node<'_>, context: &Context) -> Result<ast::Tag, String> {
     if node.as_rule() != Rule::enum_tag {
         err_unexpected_rule(Rule::enum_tag, node.as_rule())
     } else {
         match node.children().next() {
             Some(node) if node.as_rule() == Rule::enum_value => {
-                Ok(crate::ast::Tag::Value(parse_enum_value(node, context)?))
+                Ok(ast::Tag::Value(parse_enum_value(node, context)?))
             }
             Some(node) if node.as_rule() == Rule::enum_range => {
-                Ok(crate::ast::Tag::Range(parse_enum_range(node, context)?))
+                Ok(ast::Tag::Range(parse_enum_range(node, context)?))
             }
             Some(node) if node.as_rule() == Rule::enum_other => {
-                Ok(crate::ast::Tag::Other(parse_enum_other(node, context)?))
+                Ok(ast::Tag::Other(parse_enum_other(node, context)?))
             }
             Some(node) => Err(format!(
                 "expected rule {:?} or {:?}, got {:?}",
@@ -409,7 +400,7 @@ fn parse_enum_tag(node: Node<'_>, context: &Context) -> Result<crate::ast::Tag,
 fn parse_enum_tag_list(
     iter: &mut NodeIterator<'_>,
     context: &Context,
-) -> Result<Vec<crate::ast::Tag>, String> {
+) -> Result<Vec<ast::Tag>, String> {
     expect(iter, Rule::enum_tag_list)
         .and_then(|n| n.children().map(|n| parse_enum_tag(n, context)).collect())
 }
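For reference, the `Tag` variants dispatched by `parse_enum_tag` above, reconstructed from the match arms (the canonical definitions live in `pdl-compiler/src/ast.rs`):

```rust
// Reconstructed sketch -- one variant per enum_* grammar rule.
pub enum Tag {
    Value(TagValue), // enum_value: a named tag with a fixed value
    Range(TagRange), // enum_range: a named span of values
    Other(TagOther), // enum_other: the catch-all tag
}
```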
@@ -421,18 +412,18 @@ fn parse_field(node: Node<'_>, context: &Context) -> Result<crate::ast::Field, 
     let cond = children.next();
     let rule = desc.as_rule();
     let mut children = desc.children();
-    Ok(crate::ast::Field {
+    Ok(ast::Field {
         loc,
-        annot: Default::default(),
+        key: context.key(),
         cond: cond.map(|constraint| parse_constraint(constraint, context)).transpose()?,
         desc: match rule {
             Rule::checksum_field => {
                 let field_id = parse_identifier(&mut children)?;
-                crate::ast::FieldDesc::Checksum { field_id }
+                ast::FieldDesc::Checksum { field_id }
             }
             Rule::padding_field => {
                 let size = parse_integer(&mut children)?;
-                crate::ast::FieldDesc::Padding { size }
+                ast::FieldDesc::Padding { size }
             }
             Rule::size_field => {
                 let field_id = match children.next() {
@@ -443,39 +434,39 @@ fn parse_field(node: Node<'_>, context: &Context) -> Result<crate::ast::Field, 
                     None => err_missing_rule(Rule::identifier)?,
                 };
                 let width = parse_integer(&mut children)?;
-                crate::ast::FieldDesc::Size { field_id, width }
+                ast::FieldDesc::Size { field_id, width }
             }
             Rule::count_field => {
                 let field_id = parse_identifier(&mut children)?;
                 let width = parse_integer(&mut children)?;
-                crate::ast::FieldDesc::Count { field_id, width }
+                ast::FieldDesc::Count { field_id, width }
             }
             Rule::elementsize_field => {
                 let field_id = parse_identifier(&mut children)?;
                 let width = parse_integer(&mut children)?;
-                crate::ast::FieldDesc::ElementSize { field_id, width }
+                ast::FieldDesc::ElementSize { field_id, width }
             }
-            Rule::body_field => crate::ast::FieldDesc::Body,
+            Rule::body_field => ast::FieldDesc::Body,
             Rule::payload_field => {
                 let size_modifier = parse_size_modifier_opt(&mut children);
-                crate::ast::FieldDesc::Payload { size_modifier }
+                ast::FieldDesc::Payload { size_modifier }
             }
             Rule::fixed_field => match children.next() {
                 Some(n) if n.as_rule() == Rule::integer => {
                     let value = n.as_usize()?;
                     let width = parse_integer(&mut children)?;
-                    crate::ast::FieldDesc::FixedScalar { width, value }
+                    ast::FieldDesc::FixedScalar { width, value }
                 }
                 Some(n) if n.as_rule() == Rule::identifier => {
                     let tag_id = n.as_string();
                     let enum_id = parse_identifier(&mut children)?;
-                    crate::ast::FieldDesc::FixedEnum { enum_id, tag_id }
+                    ast::FieldDesc::FixedEnum { enum_id, tag_id }
                 }
                 _ => unreachable!(),
             },
             Rule::reserved_field => {
                 let width = parse_integer(&mut children)?;
-                crate::ast::FieldDesc::Reserved { width }
+                ast::FieldDesc::Reserved { width }
             }
             Rule::array_field => {
                 let id = parse_identifier(&mut children)?;
@@ -493,22 +484,22 @@ fn parse_field(node: Node<'_>, context: &Context) -> Result<crate::ast::Field, 
                     }
                     None => (None, None),
                 };
-                crate::ast::FieldDesc::Array { id, type_id, width, size, size_modifier }
+                ast::FieldDesc::Array { id, type_id, width, size, size_modifier }
             }
             Rule::scalar_field => {
                 let id = parse_identifier(&mut children)?;
                 let width = parse_integer(&mut children)?;
-                crate::ast::FieldDesc::Scalar { id, width }
+                ast::FieldDesc::Scalar { id, width }
             }
             Rule::typedef_field => {
                 let id = parse_identifier(&mut children)?;
                 let type_id = parse_identifier(&mut children)?;
-                crate::ast::FieldDesc::Typedef { id, type_id }
+                ast::FieldDesc::Typedef { id, type_id }
             }
             Rule::group_field => {
                 let group_id = parse_identifier(&mut children)?;
                 let constraints = parse_constraint_list_opt(&mut children, context)?;
-                crate::ast::FieldDesc::Group { group_id, constraints }
+                ast::FieldDesc::Group { group_id, constraints }
             }
             _ => return Err(format!("expected rule *_field, got {:?}", rule)),
         },
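The match above doubles as an inventory of the `FieldDesc` variants. A reconstructed sketch from the constructor sites (payload types are assumptions based on the parse helpers used; the canonical definition lives in `pdl-compiler/src/ast.rs`):

```rust
// Sketch only -- variants and payloads as spelled out in parse_field.
pub enum FieldDesc {
    Checksum { field_id: String },
    Padding { size: usize },
    Size { field_id: String, width: usize },
    Count { field_id: String, width: usize },
    ElementSize { field_id: String, width: usize },
    Body,
    Payload { size_modifier: Option<String> },
    FixedScalar { width: usize, value: usize },
    FixedEnum { enum_id: String, tag_id: String },
    Reserved { width: usize },
    Array {
        id: String,
        type_id: Option<String>,
        width: Option<usize>,
        size: Option<usize>,
        size_modifier: Option<String>,
    },
    Scalar { id: String, width: usize },
    Typedef { id: String, type_id: String },
    Group { group_id: String, constraints: Vec<Constraint> },
}
```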
@@ -530,7 +521,7 @@ fn parse_field_list_opt(
 
 fn parse_toplevel(root: Node<'_>, context: &Context) -> Result<ast::File, String> {
     let mut toplevel_comments = vec![];
-    let mut file = crate::ast::File::new(context.0);
+    let mut file = ast::File::new(context.0);
     let mut comment_start = vec![];
 
     for token in root.clone().tokens() {
@@ -538,11 +529,11 @@ fn parse_toplevel(root: Node<'_>, context: &Context) -> Result<ast::File, Strin
             Token::Start { rule: Rule::COMMENT, pos } => comment_start.push(pos),
             Token::End { rule: Rule::COMMENT, pos } => {
                 let start_pos = comment_start.pop().unwrap();
-                file.comments.push(crate::ast::Comment {
-                    loc: crate::ast::SourceRange {
+                file.comments.push(ast::Comment {
+                    loc: ast::SourceRange {
                         file: context.0,
-                        start: crate::ast::SourceLocation::new(start_pos.pos(), context.1),
-                        end: crate::ast::SourceLocation::new(pos.pos(), context.1),
+                        start: ast::SourceLocation::new(start_pos.pos(), context.1),
+                        end: ast::SourceLocation::new(pos.pos(), context.1),
                     },
                     text: start_pos.span(&pos).as_str().to_owned(),
                 })
@@ -562,10 +553,11 @@ fn parse_toplevel(root: Node<'_>, context: &Context) -> Result<ast::File, Strin
                 let mut children = node.children();
@@ -573,10 +565,11 @@ fn parse_toplevel(root: Node<'_>, context: &Context) -> Result<ast::File, Strin
                 let mut children = node.children();
@@ -584,10 +577,11 @@ fn parse_toplevel(root: Node<'_>, context: &Context) -> Result<ast::File, Strin
                 let mut children = node.children();
@@ -596,10 +590,11 @@ fn parse_toplevel(root: Node<'_>, context: &Context) -> Result<ast::File, Strin
                 let mut children = node.children();
@@ -608,18 +603,22 @@ fn parse_toplevel(root: Node<'_>, context: &Context) -> Result<ast::File, Strin
                 let mut children = node.children();
                 expect(&mut children, Rule::GROUP)?;
                 let id = parse_identifier(&mut children)?;
                 let fields = parse_field_list(&mut children, context)?;
-                file.declarations
-                    .push(crate::ast::Decl::new(loc, crate::ast::DeclDesc::Group { id, fields }))
+                file.declarations.push(ast::Decl {
+                    loc,
+                    key: context.key(),
+                    desc: ast::DeclDesc::Group { id, fields },
+                })
             }
             Rule::test_declaration => {}
             Rule::EOI => (),
@@ -627,6 +626,7 @@ fn parse_toplevel(root: Node<'_>, context: &Context) -> Result<ast::File, Strin
 pub fn parse_inline(
     sources: &mut ast::SourceDatabase,
     name: &str,
     source: String,
-) -> Result<ast::File, Diagnostic<crate::ast::FileId>> {
+) -> Result<ast::File, Diagnostic<ast::FileId>> {
     let root = PDLParser::parse(Rule::file, &source)
         .map_err(|e| {
             Diagnostic::error()
@@ -648,7 +648,8 @@ pub fn parse_inline(
         .unwrap();
     let line_starts: Vec<_> = files::line_starts(&source).collect();
     let file = sources.add(name.to_owned(), source.clone());
-    parse_toplevel(root, &(file, &line_starts)).map_err(|e| Diagnostic::error().with_message(e))
+    parse_toplevel(root, &Context(file, &line_starts, std::cell::Cell::new(0)))
+        .map_err(|e| Diagnostic::error().with_message(e))
 }
 
 /// Parse a new source file.
@@ -657,9 +658,9 @@ pub fn parse_inline(
 /// The source file is fully read and added to the compilation
 /// database. Returns the constructed AST, or a descriptive error
 /// message in case of syntax error.
 pub fn parse_file(
-    sources: &mut crate::ast::SourceDatabase,
+    sources: &mut ast::SourceDatabase,
     name: &str,
-) -> Result<ast::File, Diagnostic<crate::ast::FileId>> {
+) -> Result<ast::File, Diagnostic<ast::FileId>> {
     let source = std::fs::read_to_string(name).map_err(|e| {
         Diagnostic::error().with_message(format!("failed to read input file '{}': {}", name, e))
     })?;
@@ -674,10 +675,10 @@ mod test {
     fn endianness_is_set() {
         // The file starts out with a placeholder little-endian value.
         // This tests that we update it while parsing.
-        let mut db = crate::ast::SourceDatabase::new();
+        let mut db = ast::SourceDatabase::new();
         let file = parse_inline(&mut db, "stdin", String::from("  big_endian_packets  ")).unwrap();
-        assert_eq!(file.endianness.value, crate::ast::EndiannessValue::BigEndian);
-        assert_ne!(file.endianness.loc, crate::ast::SourceRange::default());
+        assert_eq!(file.endianness.value, ast::EndiannessValue::BigEndian);
+        assert_ne!(file.endianness.loc, ast::SourceRange::default());
     }
 
     #[test]
@@ -709,7 +710,7 @@ mod test {
     fn test_no_whitespace_between_keywords() {
         // Validate that the parser rejects inputs where whitespaces
         // are not applied between alphabetical keywords and identifiers.
-        let mut db = crate::ast::SourceDatabase::new();
+        let mut db = ast::SourceDatabase::new();
         assert!(parse_inline(
             &mut db,
             "test",
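The updated tests double as usage documentation for the entry point: `parse_inline` now seeds the `Context` with a zeroed key counter internally, so callers are unaffected. A hypothetical smoke test in the same style as `endianness_is_set` above:

```rust
#[test]
fn parse_inline_smoke() {
    // Hypothetical test; mirrors endianness_is_set. Every Field and Decl
    // in the returned file carries a unique key assigned in parse order.
    let mut db = ast::SourceDatabase::new();
    let file = parse_inline(&mut db, "stdin", String::from("  big_endian_packets  ")).unwrap();
    assert_eq!(file.endianness.value, ast::EndiannessValue::BigEndian);
}
```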
diff --git a/pdl-runtime/src/lib.rs b/pdl-runtime/src/lib.rs
index 0ae10fa..3a468b8 100644
--- a/pdl-runtime/src/lib.rs
+++ b/pdl-runtime/src/lib.rs
@@ -32,7 +32,12 @@ pub enum DecodeError {
     #[error("Due to size restrictions a struct could not be parsed.")]
     ImpossibleStructError,
     #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
-    InvalidEnumValueError { obj: &'static str, field: &'static str, value: u64, type_: &'static str },
+    InvalidEnumValueError {
+        obj: &'static str,
+        field: &'static str,
+        value: u64,
+        type_: &'static str,
+    },
     #[error("expected child {expected}, got {actual}")]
     InvalidChildError { expected: &'static str, actual: String },
     #[error("packet has trailing bytes")]
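For reference, the reflowed `InvalidEnumValueError` variant renders through `thiserror`'s `#[error(...)]` attribute exactly as before; a sketch with hypothetical field values:

```rust
use pdl_runtime::DecodeError;

fn main() {
    // Hypothetical values; only the Display rendering is illustrated.
    let err = DecodeError::InvalidEnumValueError {
        obj: "TestPacket",
        field: "opcode",
        value: 255,
        type_: "Opcode",
    };
    assert_eq!(
        err.to_string(),
        "when parsing field TestPacket.opcode, 255 is not a valid Opcode value"
    );
}
```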