Skip to content

Commit

Permalink
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Use own proc macro to generate code to deal with packets
Browse files Browse the repository at this point in the history
Features:
- removes unnecessary dependencies
- more type safe code
- faster parser: 3x
fixes #1
Dushistov committed Mar 12, 2020
1 parent 567263b commit 5390375
Showing 20 changed files with 3,463 additions and 1,499 deletions.
3 changes: 3 additions & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
@@ -9,6 +9,9 @@ matrix:
allow_failures:
- rust: nightly
fast_finish: true
before_script:
- rustup component add rustfmt-preview
- which rustfmt
script:
- cargo test --verbose --workspace
- cargo test --release --verbose --workspace
12 changes: 6 additions & 6 deletions ublox/Cargo.toml
Original file line number Diff line number Diff line change
@@ -11,12 +11,12 @@ default = []
serial = ["serialport", "crc"]

[dependencies]
serde = "1.0"
serde_derive = "1.0"
bincode = "1.2.1"
chrono = "0.4"
ublox_derive = "0.0.0"
ublox_derive = "0.0.1"
serialport = { version = "3.3.0", optional = true }
crc = { version = "1.8.1", optional = true }
num-derive = "0.3.0"
num-traits = "0.2.11"
bitflags = "1.2.1"

[dev-dependencies]
rand = "0.7.3"
cpu-time = "1.0.0"
29 changes: 14 additions & 15 deletions ublox/examples/serial.rs
Original file line number Diff line number Diff line change
@@ -6,24 +6,23 @@ mod serial {

pub fn main() {
let mut dev = Device::new("/dev/ttyUSB0").unwrap();

let pos = Position {
lon: -97.5,
lat: 30.2,
alt: 200.0,
};
println!("Setting AID data...");
match dev.load_aid_data(Some(pos), Some(Utc::now())) {
Err(e) => {
println!("Got error setting AID data: {:?}", e);
}
_ => {}
}

/*
let pos = Position {
lon: -97.5,
lat: 30.2,
alt: 200.0,
};
println!("Setting AID data...");
match dev.load_aid_data(Some(pos), Some(Utc::now())) {
Err(e) => {
println!("Got error setting AID data: {:?}", e);
}
_ => {}
}
loop {
dev.poll_for(Duration::from_millis(500)).unwrap();
println!("{:?}", dev.get_solution());
}
} */
}
}

73 changes: 58 additions & 15 deletions ublox/src/error.rs
Original file line number Diff line number Diff line change
@@ -1,25 +1,68 @@
use std::convert;
use std::io;
use std::fmt;

#[derive(Debug)]
pub enum Error {
InvalidChecksum,
UnexpectedPacket,
TimedOutWaitingForAck(u8, u8),
IoError(io::Error),
BincodeError(bincode::Error),
pub enum MemWriterError<E>
where
E: std::error::Error,
{
NotEnoughMem,
Custom(E),
}

impl convert::From<io::Error> for Error {
fn from(error: io::Error) -> Self {
Error::IoError(error)
impl<E> fmt::Display for MemWriterError<E>
where
E: std::error::Error,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
MemWriterError::NotEnoughMem => f.write_str("Not enough memory error"),
MemWriterError::Custom(e) => write!(f, "MemWriterError: {}", e),
}
}
}

impl convert::From<bincode::Error> for Error {
fn from(error: bincode::Error) -> Self {
Error::BincodeError(error)
impl<E> std::error::Error for MemWriterError<E> where E: std::error::Error {}

/// Errors that can occur during packet parsing
#[derive(Debug, PartialEq)]
pub enum ParserError {
    /// The checksum stored in the packet does not match the one calculated
    /// over the packet body
    InvalidChecksum {
        /// Checksum stored in the packet (little-endian `ck_a`, `ck_b`)
        expect: u16,
        /// Checksum calculated over the received bytes
        got: u16,
    },
    /// A field of the packet holds a value that is not valid for its type
    InvalidField {
        /// Name of the packet containing the bad field
        packet: &'static str,
        /// Name of the offending field
        field: &'static str,
    },
    /// Payload length differs from the length expected for this packet type
    InvalidPacketLen {
        /// Name of the packet
        packet: &'static str,
        /// Expected payload length in bytes
        expect: usize,
        /// Actual payload length in bytes
        got: usize,
    },
}

impl fmt::Display for ParserError {
    /// Render a human-readable description of the parse failure.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // All variants carry only `Copy` data, so match by value.
        match *self {
            ParserError::InvalidChecksum { expect, got } => {
                write!(
                    f,
                    "Not valid packet's checksum, expect {:x}, got {:x}",
                    expect, got
                )
            }
            ParserError::InvalidField { packet, field } => {
                write!(f, "Invalid field {} of packet {}", field, packet)
            }
            ParserError::InvalidPacketLen { packet, expect, got } => {
                write!(
                    f,
                    "Invalid packet({}) length, expect {}, got {}",
                    packet, expect, got
                )
            }
        }
    }
}

pub type Result<T> = std::result::Result<T, Error>;
impl std::error::Error for ParserError {}
9 changes: 6 additions & 3 deletions ublox/src/lib.rs
Original file line number Diff line number Diff line change
@@ -4,13 +4,16 @@
//! At time of writing this library is developed for a device which behaves like
//! a NEO-6M device.
pub use crate::segmenter::Segmenter;
#[cfg(feature = "serial")]
pub use crate::serialport::{Device, ResetType};
pub use crate::ubx_packets::*;
pub use crate::{
error::{MemWriterError, ParserError},
parser::{Parser, ParserIter},
ubx_packets::*,
};

mod error;
mod segmenter;
mod parser;
#[cfg(feature = "serial")]
mod serialport;
mod ubx_packets;
121 changes: 121 additions & 0 deletions ublox/src/parser.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,121 @@
use crate::{
error::ParserError,
ubx_packets::{match_packet, ubx_checksum, PacketRef, SYNC_CHAR_1, SYNC_CHAR_2},
};

/// Some big number, TODO: need validation against all known packets
const MAX_PACK_LEN: usize = 1022;

/// Streaming parser for UBX protocol with buffer
#[derive(Default)]
pub struct Parser {
    /// Bytes carried over between `consume` calls: garbage-free data that
    /// starts at a sync char but does not yet form a complete packet
    buf: Vec<u8>,
}

impl Parser {
    /// `true` if no partial packet data is currently buffered.
    pub fn is_buffer_empty(&self) -> bool {
        self.buf.is_empty()
    }
    /// Number of bytes currently held in the internal buffer.
    pub fn buffer_len(&self) -> usize {
        self.buf.len()
    }
    /// Feed `new_data` into the parser and return an iterator over the
    /// packets that are now complete. Bytes before the first sync char are
    /// discarded; incomplete trailing data is kept for the next call.
    pub fn consume<'a, 'b, 'c>(&'a mut self, new_data: &'b [u8]) -> ParserIter<'c>
    where
        'a: 'c,
    {
        // Scan the logical stream (buffered bytes followed by new bytes)
        // for the first UBX sync character.
        match self
            .buf
            .iter()
            .chain(new_data.iter())
            .position(|x| *x == SYNC_CHAR_1)
        {
            Some(mut off) => {
                if off >= self.buf.len() {
                    // Sync char lies inside `new_data`: everything buffered
                    // so far is garbage, keep only `new_data` from the sync
                    // char onwards.
                    off -= self.buf.len();
                    self.buf.clear();
                    self.buf.extend_from_slice(&new_data[off..]);
                    off = 0;
                } else {
                    // Sync char is within the buffered bytes: just append
                    // the new data and start scanning at that offset.
                    self.buf.extend_from_slice(new_data);
                }
                ParserIter {
                    buf: &mut self.buf,
                    off,
                }
            }
            None => {
                // No sync char anywhere: none of these bytes can ever start
                // a packet, so drop everything.
                self.buf.clear();
                ParserIter {
                    buf: &mut self.buf,
                    off: 0,
                }
            }
        }
    }
}

/// Iterator over data stored in `Parser` buffer
pub struct ParserIter<'a> {
    /// Borrowed parser buffer; consumed bytes are drained from it on drop
    buf: &'a mut Vec<u8>,
    /// Offset of the first not-yet-consumed byte in `buf`
    off: usize,
}

impl<'a> Drop for ParserIter<'a> {
    /// Remove every byte that `next` consumed from the parser buffer, so
    /// only unparsed data survives until the next `Parser::consume` call.
    fn drop(&mut self) {
        if self.off <= self.buf.len() {
            self.buf.drain(0..self.off);
        }
    }
}

impl<'a> ParserIter<'a> {
    /// Analog of `core::iter::Iterator::next`, should be switched to
    /// trait implementation after merge of https://github.com/rust-lang/rust/issues/44265
    ///
    /// Returns `None` when no complete packet is available yet (remaining
    /// bytes stay buffered), `Some(Err(_))` on a checksum mismatch, and
    /// `Some(Ok(_))` for every packet successfully parsed out of the buffer.
    pub fn next(&mut self) -> Option<Result<PacketRef, ParserError>> {
        while self.off < self.buf.len() {
            let data = &self.buf[self.off..];
            // Re-sync: locate the next candidate start-of-packet marker.
            let pos = match data.iter().position(|x| *x == SYNC_CHAR_1) {
                Some(x) => x,
                None => return None,
            };

            // Need at least one more byte to check the second sync char.
            if (pos + 1) >= data.len() {
                return None;
            }
            if data[pos + 1] != SYNC_CHAR_2 {
                // False start; skip past this byte and keep searching.
                self.off += pos + 1;
                continue;
            }

            // Header is 6 bytes: sync(2) + class(1) + id(1) + len(2, LE).
            if (pos + 5) >= data.len() {
                return None;
            }

            let pack_len: usize = u16::from_le_bytes([data[pos + 4], data[pos + 5]]).into();
            if pack_len > MAX_PACK_LEN {
                // Implausible length, most likely a spurious sync marker.
                self.off += pos + 1;
                continue;
            }
            // Full frame = header(6) + payload(pack_len) + checksum(2).
            if (pos + pack_len + 6 + 2 - 1) >= data.len() {
                return None;
            }
            // Checksum covers class, id, length and payload (everything
            // between the sync chars and the checksum itself).
            let (ck_a, ck_b) = ubx_checksum(&data[(pos + 2)..(pos + pack_len + 4 + 2)]);

            let (expect_ck_a, expect_ck_b) =
                (data[pos + 6 + pack_len], data[pos + 6 + pack_len + 1]);
            if (ck_a, ck_b) != (expect_ck_a, expect_ck_b) {
                // Advance past the sync pair so the next call re-syncs
                // just after the bad frame start.
                self.off += pos + 2;
                return Some(Err(ParserError::InvalidChecksum {
                    expect: u16::from_le_bytes([expect_ck_a, expect_ck_b]),
                    got: u16::from_le_bytes([ck_a, ck_b]),
                }));
            }
            let msg_data = &data[(pos + 6)..(pos + 6 + pack_len)];
            let class_id = data[pos + 2];
            let msg_id = data[pos + 3];
            self.off += pos + 6 + pack_len + 2;
            return Some(match_packet(class_id, msg_id, msg_data));
        }
        None
    }
}
161 changes: 0 additions & 161 deletions ublox/src/segmenter.rs

This file was deleted.

495 changes: 255 additions & 240 deletions ublox/src/serialport.rs

Large diffs are not rendered by default.

583 changes: 96 additions & 487 deletions ublox/src/ubx_packets.rs

Large diffs are not rendered by default.

600 changes: 600 additions & 0 deletions ublox/src/ubx_packets/packets.rs

Large diffs are not rendered by default.

65 changes: 65 additions & 0 deletions ublox/src/ubx_packets/types.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
use super::packets::*;
use chrono::prelude::*;

/// Geodetic position extracted from navigation packets.
#[derive(Debug)]
pub struct Position {
    /// Longitude in degrees (from the packet's `lon_degrees`)
    pub lon: f64,
    /// Latitude in degrees (from the packet's `lat_degrees`)
    pub lat: f64,
    /// Altitude, taken from the packet's `height_msl` accessor
    /// (presumably height above mean sea level — TODO confirm units)
    pub alt: f64,
}

/// Ground velocity extracted from navigation packets.
#[derive(Debug)]
pub struct Velocity {
    pub speed: f64, // m/s over ground
    pub heading: f64, // degrees
}

impl<'a> From<&NavPosLlhRef<'a>> for Position {
fn from(packet: &NavPosLlhRef<'a>) -> Self {
Position {
lon: packet.lon_degrees(),
lat: packet.lat_degrees(),
alt: packet.height_msl(),
}
}
}

impl<'a> From<&NavVelNedRef<'a>> for Velocity {
fn from(packet: &NavVelNedRef<'a>) -> Self {
Velocity {
speed: packet.ground_speed(),
heading: packet.heading_degrees(),
}
}
}

impl<'a> From<&NavPosVelTimeRef<'a>> for Position {
fn from(packet: &NavPosVelTimeRef<'a>) -> Self {
Position {
lon: packet.lon_degrees(),
lat: packet.lat_degrees(),
alt: packet.height_msl(),
}
}
}

impl<'a> From<&NavPosVelTimeRef<'a>> for Velocity {
fn from(packet: &NavPosVelTimeRef<'a>) -> Self {
Velocity {
speed: packet.ground_speed(),
heading: packet.heading_degrees(),
}
}
}

impl<'a> From<&NavPosVelTimeRef<'a>> for DateTime<Utc> {
    /// Assemble a UTC timestamp from the packet's date/time fields.
    fn from(sol: &NavPosVelTimeRef<'a>) -> Self {
        // NOTE(review): a negative nanosecond value is clamped to 0 because
        // `and_hms_nano` requires an unsigned value — presumably the receiver
        // can report a small negative correction; confirm against the UBX spec.
        let ns = if sol.nanosecond().is_negative() {
            0
        } else {
            sol.nanosecond()
        } as u32;
        Utc.ymd(sol.year() as i32, sol.month().into(), sol.day().into())
            .and_hms_nano(sol.hour().into(), sol.min().into(), sol.sec().into(), ns)
    }
}
14 changes: 14 additions & 0 deletions ublox/tests/generator_test.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
use ublox::{CfgMsg3Builder, NavPosLlh, NavStatus};

#[test]
fn test_cfg_msg_simple() {
    // Expected on-the-wire frames for CFG-MSG rate requests.
    let expected_nav_pos_llh = [0xb5, 0x62, 0x06, 0x01, 0x03, 0x00, 0x01, 0x02, 0x01, 0x0E, 0x47];
    let expected_nav_status = [0xb5, 0x62, 0x06, 0x01, 0x03, 0x00, 0x01, 0x03, 0x01, 0x0F, 0x49];

    assert_eq!(
        expected_nav_pos_llh,
        CfgMsg3Builder::set_rate_for::<NavPosLlh>(1).to_packet_bytes()
    );
    assert_eq!(
        expected_nav_status,
        CfgMsg3Builder::set_rate_for::<NavStatus>(1).to_packet_bytes()
    );
}
157 changes: 157 additions & 0 deletions ublox/tests/parser_tests.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,157 @@
use cpu_time::ProcessTime;
use rand::{thread_rng, Rng};
use std::{env, fs, path::Path};
use ublox::{PacketRef, Parser, ParserError, ParserIter};

#[test]
fn test_ack_ack_simple() {
    type ParseResult = Result<(u8, u8), ParserError>;
    // Collect every ACK-ACK packet (as `(class, msg_id)`) and every parser
    // error from the iterator; any other packet kind fails the test.
    fn extract_ack_ack(mut it: ParserIter) -> Vec<ParseResult> {
        let mut ret = vec![];
        while let Some(pack) = it.next() {
            match pack {
                Ok(PacketRef::AckAck(pack)) => {
                    ret.push(Ok((pack.class(), pack.msg_id())));
                }
                Err(err) => ret.push(Err(err)),
                _ => assert!(false),
            }
        }
        ret
    }
    macro_rules! my_vec {
        ($($x:expr),*) => {{
            let v: Vec<ParseResult> = vec![$($x),*];
            v
        }}
    }

    let mut parser = Parser::default();
    assert!(parser.is_buffer_empty());
    assert_eq!(
        my_vec![],
        extract_ack_ack(parser.consume(&[])),
        "empty buffer parsing"
    );
    assert!(parser.is_buffer_empty());

    // One complete, valid ACK-ACK frame acknowledging class 6, msg 1.
    let full_pack = [0xb5, 0x62, 0x5, 0x1, 0x2, 0x0, 0x6, 0x1, 0xf, 0x38];
    assert_eq!(
        my_vec![Ok((6, 1))],
        extract_ack_ack(parser.consume(&full_pack)),
        "full packet parsing"
    );
    assert!(parser.is_buffer_empty());

    // Corrupt one payload byte so the checksum no longer matches.
    let mut bad_pack = full_pack.clone();
    bad_pack[bad_pack.len() - 3] = 5;
    assert_eq!(
        my_vec![Err(ParserError::InvalidChecksum {
            expect: 0x380f,
            got: 0x3c13
        })],
        extract_ack_ack(parser.consume(&bad_pack)),
        "invalid checksum"
    );
    // Everything after the sync pair stays buffered for re-sync.
    assert_eq!(bad_pack.len() - 2, parser.buffer_len());

    let mut two_packs = full_pack.to_vec();
    two_packs.extend_from_slice(&full_pack);
    assert_eq!(
        my_vec![Ok((6, 1)), Ok((6, 1))],
        extract_ack_ack(parser.consume(&two_packs)),
        "two packets"
    );
    assert!(parser.is_buffer_empty());

    // Feed only the first half of a packet: nothing parses, bytes buffer up.
    assert_eq!(
        my_vec![],
        extract_ack_ack(parser.consume(&full_pack[0..5])),
        "part of packet"
    );
    assert_eq!(5, parser.buffer_len());
    // Bug fix: actually consume the *rest* of the split packet plus one more
    // full packet (previously `two_packs` was consumed here by mistake and
    // `rest_and_next` was left unused, so continuation was never exercised).
    let mut rest_and_next = (&full_pack[5..]).to_vec();
    rest_and_next.extend_from_slice(&full_pack);
    assert_eq!(
        my_vec![Ok((6, 1)), Ok((6, 1))],
        extract_ack_ack(parser.consume(&rest_and_next)),
        "split packet continuation + next packet"
    );
    assert!(parser.is_buffer_empty());

    // Leading garbage without sync chars must be skipped.
    let mut garbage_before = vec![0x00, 0x06, 0x01, 0x0f, 0x38];
    garbage_before.extend_from_slice(&full_pack);
    assert_eq!(
        my_vec![Ok((6, 1))],
        extract_ack_ack(parser.consume(&garbage_before)),
        "garbage before1"
    );

    // Garbage containing stray sync bytes must not confuse re-sync.
    let mut garbage_before = vec![0xb5, 0xb5, 0x62, 0x62, 0x38];
    garbage_before.extend_from_slice(&full_pack);
    assert_eq!(
        my_vec![Ok((6, 1))],
        extract_ack_ack(parser.consume(&garbage_before)),
        "garbage before2"
    );
}

#[test]
#[ignore]
fn test_parse_big_file() {
    // Integration benchmark over a real capture; requires UBX_BIG_LOG_PATH
    // to point at the log file, hence `#[ignore]` by default.
    let ubx_big_log_path = env::var("UBX_BIG_LOG_PATH").unwrap();
    let ubx_big_log_path = Path::new(&ubx_big_log_path);

    let biglog = fs::read(ubx_big_log_path).unwrap();
    const MAX_SIZE: usize = 100;
    // Pre-compute random chunk sizes to simulate reading the stream in
    // irregular pieces, as a serial port would deliver it.
    let mut read_sizes = Vec::with_capacity(biglog.len() / MAX_SIZE / 2);
    let mut rng = thread_rng();
    let mut i = 0;
    while i < biglog.len() {
        let chunk: usize = rng.gen_range(1, MAX_SIZE);
        let chunk = (biglog.len() - i).min(chunk);
        read_sizes.push(chunk);
        i += chunk;
    }

    // Per-packet-kind counters; expected totals below are specific to the
    // reference log file.
    let mut wrong_chksum = 0usize;
    let mut other_errors = 0usize;
    let mut nav_pos_llh = 0usize;
    let mut nav_stat = 0usize;
    let mut ack_ack = 0usize;
    let mut unknown = 0usize;

    let mut log = biglog.as_slice();
    let mut parser = Parser::default();

    // Measure CPU time (not wall clock) spent parsing the whole log.
    let start = ProcessTime::now();
    for chunk_size in &read_sizes {
        let (buf, rest) = log.split_at(*chunk_size);
        log = rest;
        let mut it = parser.consume(buf);
        while let Some(pack) = it.next() {
            match pack {
                Ok(pack) => match pack {
                    PacketRef::AckAck(_) => ack_ack += 1,
                    PacketRef::NavPosLlh(_) => nav_pos_llh += 1,
                    PacketRef::NavStatus(_) => nav_stat += 1,
                    _ => unknown += 1,
                },
                Err(ParserError::InvalidChecksum { .. }) => wrong_chksum += 1,
                Err(_) => other_errors += 1,
            }
        }
    }
    let cpu_time = start.elapsed();
    println!(
        "parse time of {}: {:?}",
        ubx_big_log_path.display(),
        cpu_time
    );
    // Expected counts for the reference log (no parse errors at all).
    assert_eq!(0, wrong_chksum);
    assert_eq!(0, other_errors);
    assert_eq!(38291, nav_pos_llh);
    assert_eq!(38291, nav_stat);
    assert_eq!(120723, unknown);
    assert_eq!(1, ack_ack);
}
9 changes: 6 additions & 3 deletions ublox_derive/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "ublox_derive"
version = "0.0.0"
version = "0.0.1"
authors = ["Lane Kolbly <lane@rscheme.org>"]
edition = "2018"
publish = false
@@ -11,5 +11,8 @@ proc-macro = true
[dependencies]
proc-macro2 = "1.0"
quote = "1.0"
syn = { version = "1.0.14", features = ["extra-traits"] }
proc-macro-error = "0.4.8"
syn = { version = "1.0.14", features = ["extra-traits", "full"] }

[dev-dependencies]
which = { version = "3.0", default-features = false }
proc-macro2 = { version = "1.0", features = ["span-locations"] }
613 changes: 613 additions & 0 deletions ublox_derive/src/input.rs

Large diffs are not rendered by default.

684 changes: 150 additions & 534 deletions ublox_derive/src/lib.rs

Large diffs are not rendered by default.

553 changes: 553 additions & 0 deletions ublox_derive/src/output.rs

Large diffs are not rendered by default.

570 changes: 570 additions & 0 deletions ublox_derive/src/tests.rs

Large diffs are not rendered by default.

176 changes: 176 additions & 0 deletions ublox_derive/src/types.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,176 @@
use proc_macro2::TokenStream;
use quote::quote;
use std::num::NonZeroUsize;
use syn::{Attribute, Ident, Type};

/// Parsed description of one UBX packet, as extracted from the macro input.
pub struct PackDesc {
    /// Packet name (e.g. "NavPosLlh")
    pub name: String,
    /// Class/id/length header metadata
    pub header: PackHeader,
    /// Doc comment attached to the packet definition
    pub comment: String,
    /// Payload fields in wire order
    pub fields: Vec<PackField>,
}

impl PackDesc {
    /// Total payload size in bytes; `None` if the packet has variable size.
    pub fn packet_payload_size(&self) -> Option<usize> {
        PackDesc::fields_size(self.fields.iter())
    }

    /// Payload size of all fields except the last one; `None` if any of
    /// those has no fixed size (only the last field may be variable-length).
    pub fn packet_payload_size_except_last_field(&self) -> Option<usize> {
        PackDesc::fields_size(self.fields.iter().rev().skip(1))
    }

    /// Sum the fixed sizes of `iter`'s fields, short-circuiting to `None`
    /// on the first field without a fixed size.
    fn fields_size<'a, I: Iterator<Item = &'a PackField>>(iter: I) -> Option<usize> {
        iter.try_fold(0usize, |acc, field| {
            let size = field.size_bytes?;
            Some(
                acc.checked_add(size.get())
                    .expect("overflow during packet size calculation"),
            )
        })
    }
}

/// UBX frame header metadata for one packet type.
pub struct PackHeader {
    /// Message class byte
    pub class: u8,
    /// Message id byte
    pub id: u8,
    /// Payload length when the packet has a fixed size, else `None`
    pub fixed_payload_len: Option<u16>,
    /// Extra code-generation flags declared for the packet
    pub flags: Vec<PacketFlag>,
}

/// One payload field of a packet.
pub struct PackField {
    /// Field name as written in the packet definition
    pub name: Ident,
    /// Raw (wire) type of the field
    pub ty: Type,
    /// Optional mapping to a higher-level type / scaled value / alias
    pub map: PackFieldMap,
    /// Doc comment attached to the field
    pub comment: String,
    /// Fixed wire size of the field; `None` for variable-length fields
    pub size_bytes: Option<NonZeroUsize>,
}

impl PackField {
    /// `true` if the field value is converted to another ("intermediate")
    /// type before being exposed to the user.
    /// (Methods keep the historical "intermidiate" spelling: it is public API.)
    pub fn has_intermidiate_type(&self) -> bool {
        self.map.map_type.is_some()
    }
    /// Type the field is exposed as: the mapped type when a mapping is
    /// configured, otherwise the raw wire type.
    pub fn intermidiate_type(&self) -> &Type {
        self.map
            .map_type
            .as_ref()
            .map(|x| &x.ty)
            .unwrap_or(&self.ty)
    }
    /// Accessor name for the field: the alias when configured, else the
    /// raw field name.
    pub fn intermidiate_field_name(&self) -> &Ident {
        self.map.alias.as_ref().unwrap_or(&self.name)
    }
    /// `true` if the raw wire type is a fixed-size `u8` array (`[u8; N]`).
    pub fn is_field_raw_ty_byte_array(&self) -> bool {
        // Fixed: was `if cond { true } else { false }` (clippy::needless_bool);
        // the comparison itself is already the answer.
        if let syn::Type::Array(ref fixed_array) = self.ty {
            *fixed_array.elem == syn::parse_quote!(u8)
        } else {
            false
        }
    }
}

/// Optional per-field mapping attributes collected from the packet definition.
pub struct PackFieldMap {
    /// Convert the raw value into this type before exposing it
    pub map_type: Option<MapType>,
    /// Scale factor applied to the raw value
    pub scale: Option<syn::LitFloat>,
    /// Alternative accessor name for the field
    pub alias: Option<Ident>,
    /// Whether the type conversion can fail (use `from_unchecked`/`is_valid`)
    pub convert_may_fail: bool,
    /// Whether the getter should return a reference instead of a value
    pub get_as_ref: bool,
}

impl PackFieldMap {
pub fn is_none(&self) -> bool {
self.map_type.is_none() && self.scale.is_none() && self.alias.is_none()
}
pub fn none() -> Self {
Self {
map_type: None,
scale: None,
alias: None,
convert_may_fail: false,
get_as_ref: false,
}
}
}

/// Target type of a field mapping plus the token streams used to generate
/// the conversion and validation calls.
pub struct MapType {
    /// Type the raw value is converted into
    pub ty: Type,
    /// Tokens of the conversion function (`<T>::from` or `<T>::from_unchecked`)
    pub from_fn: TokenStream,
    /// Tokens of the validation function (`<T>::is_valid`)
    pub is_valid_fn: TokenStream,
}

impl MapType {
    /// Build conversion metadata for mapped type `ty`. Fallible conversions
    /// are generated through `from_unchecked`, paired with `is_valid`.
    pub fn new(ty: Type, convert_may_fail: bool) -> Self {
        let from_fn = if convert_may_fail {
            quote! { <#ty>::from_unchecked }
        } else {
            quote! { <#ty>::from }
        };
        let is_valid_fn = quote! { <#ty>::is_valid };
        Self {
            ty,
            from_fn,
            is_valid_fn,
        }
    }
}

/// Description of an enum that extends a raw UBX field value
/// (parsed from the `ubx_extend` macro input).
pub struct UbxExtendEnum {
    /// Enum name
    pub name: Ident,
    /// Underlying primitive representation type
    pub repr: Type,
    /// Which kind of from-raw conversion to generate, if any
    pub from_fn: Option<UbxTypeFromFn>,
    /// How to handle raw values not covered by the variants
    pub rest_handling: Option<UbxEnumRestHandling>,
    /// Which kind of into-raw conversion to generate, if any
    pub into_fn: Option<UbxTypeIntoFn>,
    /// Variant names with their raw values
    pub variants: Vec<(Ident, u8)>,
    /// Attributes to forward onto the generated enum
    pub attrs: Vec<syn::Attribute>,
}

/// Kind of generated from-raw conversion.
#[derive(Clone, Copy, PartialEq)]
pub enum UbxTypeFromFn {
    /// Infallible `from`
    From,
    /// `from_unchecked`, paired with an `is_valid` check
    FromUnchecked,
}

/// Kind of generated into-raw conversion.
#[derive(Clone, Copy, PartialEq)]
pub enum UbxTypeIntoFn {
    /// Convert back to the raw representation
    Raw,
}

/// How raw values without a matching variant are handled.
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum UbxEnumRestHandling {
    /// Map unmatched values to a reserved variant
    Reserved,
    /// Treat unmatched values as an error
    ErrorProne,
}

/// Description of a generated bitflags-style type.
pub struct BitFlagsMacro {
    /// Number of meaningful bits
    pub nbits: u32,
    /// Visibility of the generated type
    pub vis: syn::Visibility,
    /// Attributes to forward onto the generated type
    pub attrs: Vec<Attribute>,
    /// Name of the generated type
    pub name: Ident,
    /// Underlying primitive representation type
    pub repr_ty: Type,
    /// Declared flag constants
    pub consts: Vec<BitFlagsMacroItem>,
    /// Which kind of from-raw conversion to generate, if any
    pub from_fn: Option<UbxTypeFromFn>,
    /// Which kind of into-raw conversion to generate, if any
    pub into_fn: Option<UbxTypeIntoFn>,
    /// How to handle raw bits not covered by the constants
    pub rest_handling: Option<UbxEnumRestHandling>,
}

/// One flag constant inside a `BitFlagsMacro`.
pub struct BitFlagsMacroItem {
    /// Attributes (doc comments etc.) of the constant
    pub attrs: Vec<Attribute>,
    /// Constant name
    pub name: Ident,
    /// Constant bit value
    pub value: u64,
}

/// Per-packet code-generation flags.
#[derive(PartialEq, Clone, Copy)]
pub enum PacketFlag {
    /// Generate a `Default` implementation for the packet builder
    DefaultForBuilder,
}

/// Description of the enum that unites all receivable packet types.
pub struct RecvPackets {
    /// Name of the generated union enum
    pub union_enum_name: Ident,
    /// Name of the variant/type used for unrecognized packets
    pub unknown_ty: Ident,
    /// All known packet type names
    pub all_packets: Vec<Ident>,
}
35 changes: 0 additions & 35 deletions ublox_derive/tests/test.rs

This file was deleted.

0 comments on commit 5390375

Please sign in to comment.