Commit c9ced85

Auto merge of #77626 - tamird:parse-scope-id, r=dtolnay
Parse SocketAddrV6::scope_id

r? `@dtolnay`

bors committed Oct 7, 2020
2 parents 5779815 + 49ade22 commit c9ced85
Showing 2 changed files with 27 additions and 13 deletions.
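In user code, the effect of this change is that the `FromStr` impl for `SocketAddrV6` accepts an optional `%<scope id>` suffix inside the brackets. A small illustration, using the same literal as the new test below; it only passes on a toolchain that includes this commit:

```rust
use std::net::{Ipv6Addr, SocketAddrV6};

fn main() {
    // Same literal as IPV6_STR_PORT_SCOPE_ID in the new test below.
    let addr: SocketAddrV6 = "[2001:db8::c0a8:1%1337]:8080".parse().unwrap();
    assert_eq!(addr.ip(), &Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0xc0a8, 0x1));
    assert_eq!(addr.port(), 8080);
    assert_eq!(addr.scope_id(), 1337);

    // Without the `%` suffix the scope id still defaults to 0.
    let plain: SocketAddrV6 = "[2001:db8::c0a8:1]:8080".parse().unwrap();
    assert_eq!(plain.scope_id(), 0);
}
```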
library/std/src/net/parser.rs (34 changes: 23 additions & 11 deletions)
@@ -32,7 +32,7 @@ macro_rules! impl_helper {
     })*)
 }
 
-impl_helper! { u8 u16 }
+impl_helper! { u8 u16 u32 }
 
 struct Parser<'a> {
     // parsing as ASCII, so can use byte array
@@ -75,9 +75,12 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Read the next character from the input if it matches the target
-    fn read_given_char(&mut self, target: char) -> Option<char> {
-        self.read_atomically(|p| p.read_char().filter(|&c| c == target))
+    #[must_use]
+    /// Read the next character from the input if it matches the target.
+    fn read_given_char(&mut self, target: char) -> Option<()> {
+        self.read_atomically(|p| {
+            p.read_char().and_then(|c| if c == target { Some(()) } else { None })
+        })
     }
 
     /// Helper for reading separators in an indexed loop. Reads the separator
@@ -90,7 +93,7 @@ impl<'a> Parser<'a> {
     {
         self.read_atomically(move |p| {
             if index > 0 {
-                let _ = p.read_given_char(sep)?;
+                p.read_given_char(sep)?;
             }
             inner(p)
         })
@@ -187,8 +190,8 @@ impl<'a> Parser<'a> {
 
             // read `::` if previous code parsed less than 8 groups
             // `::` indicates one or more groups of 16 bits of zeros
-            let _ = p.read_given_char(':')?;
-            let _ = p.read_given_char(':')?;
+            p.read_given_char(':')?;
+            p.read_given_char(':')?;
 
             // Read the back part of the address. The :: must contain at least one
             // set of zeroes, so our max length is 7.
@@ -211,7 +214,15 @@ impl<'a> Parser<'a> {
     /// Read a : followed by a port in base 10.
     fn read_port(&mut self) -> Option<u16> {
         self.read_atomically(|p| {
-            let _ = p.read_given_char(':')?;
+            p.read_given_char(':')?;
             p.read_number(10, None)
         })
     }
+
+    /// Read a % followed by a scope id in base 10.
+    fn read_scope_id(&mut self) -> Option<u32> {
+        self.read_atomically(|p| {
+            p.read_given_char('%')?;
+            p.read_number(10, None)
+        })
+    }
@@ -228,12 +239,13 @@ impl<'a> Parser<'a> {
     /// Read an IPV6 address with a port
     fn read_socket_addr_v6(&mut self) -> Option<SocketAddrV6> {
         self.read_atomically(|p| {
-            let _ = p.read_given_char('[')?;
+            p.read_given_char('[')?;
             let ip = p.read_ipv6_addr()?;
-            let _ = p.read_given_char(']')?;
+            let scope_id = p.read_scope_id().unwrap_or(0);
+            p.read_given_char(']')?;
 
             let port = p.read_port()?;
-            Some(SocketAddrV6::new(ip, port, 0, 0))
+            Some(SocketAddrV6::new(ip, port, 0, scope_id))
         })
     }
 
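The grammar added above is small: after the IPv6 address and before the closing bracket, an optional `%` followed by a base-10 number becomes the scope id, and it defaults to 0 when absent. A rough standalone sketch of that shape (not the stdlib parser; the helper name `parse_v6_socket_addr` is invented for illustration):

```rust
use std::net::{Ipv6Addr, SocketAddrV6};

// Illustrative only: mirrors '[' ipv6 ['%' scope_id] ']' ':' port from the diff above.
fn parse_v6_socket_addr(s: &str) -> Option<SocketAddrV6> {
    let rest = s.strip_prefix('[')?;
    let (inside, port) = rest.split_once("]:")?;
    // Optional "%<decimal>" scope id; default to 0 when it is missing.
    let (ip_str, scope_id) = match inside.split_once('%') {
        Some((ip, scope)) => (ip, scope.parse().ok()?),
        None => (inside, 0),
    };
    let ip: Ipv6Addr = ip_str.parse().ok()?;
    Some(SocketAddrV6::new(ip, port.parse().ok()?, 0, scope_id))
}

fn main() {
    let addr = parse_v6_socket_addr("[fe80::1%2]:443").unwrap();
    assert_eq!(addr.scope_id(), 2);
}
```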
library/std/src/net/parser/tests.rs (6 changes: 4 additions & 2 deletions)
@@ -3,6 +3,7 @@ use crate::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr, SocketAddrV4, SocketAdd
 use crate::str::FromStr;
 
 const PORT: u16 = 8080;
+const SCOPE_ID: u32 = 1337;
 
 const IPV4: Ipv4Addr = Ipv4Addr::new(192, 168, 0, 1);
 const IPV4_STR: &str = "192.168.0.1";
@@ -13,6 +14,7 @@ const IPV6_STR_FULL: &str = "2001:db8:0:0:0:0:c0a8:1";
 const IPV6_STR_COMPRESS: &str = "2001:db8::c0a8:1";
 const IPV6_STR_V4: &str = "2001:db8::192.168.0.1";
 const IPV6_STR_PORT: &str = "[2001:db8::c0a8:1]:8080";
+const IPV6_STR_PORT_SCOPE_ID: &str = "[2001:db8::c0a8:1%1337]:8080";
 
 #[test]
 fn parse_ipv4() {
@@ -74,8 +76,8 @@ fn parse_socket_v4() {
 
 #[test]
 fn parse_socket_v6() {
-    let result: SocketAddrV6 = IPV6_STR_PORT.parse().unwrap();
-    assert_eq!(result, SocketAddrV6::new(IPV6, PORT, 0, 0));
+    assert_eq!(IPV6_STR_PORT.parse(), Ok(SocketAddrV6::new(IPV6, PORT, 0, 0)));
+    assert_eq!(IPV6_STR_PORT_SCOPE_ID.parse(), Ok(SocketAddrV6::new(IPV6, PORT, 0, SCOPE_ID)));
 
     assert!(SocketAddrV6::from_str(IPV4_STR).is_err());
     assert!(SocketAddrV6::from_str(IPV4_STR_PORT).is_err());

0 comments on commit c9ced85