Generic AST #19

Merged: 2 commits, Oct 21, 2019
10 changes: 5 additions & 5 deletions benches/graphql.rs
@@ -19,29 +19,29 @@ fn load_file(name: &str) -> String {
#[bench]
fn bench_minimal(b: &mut test::Bencher) {
let f = load_file("minimal");
b.iter(|| parse_query(&f).unwrap());
b.iter(|| parse_query::<String>(&f).unwrap());
}

#[bench]
fn bench_inline_fragment(b: &mut test::Bencher) {
let f = load_file("inline_fragment");
b.iter(|| parse_query(&f).unwrap());
b.iter(|| parse_query::<String>(&f).unwrap());
}

#[bench]
fn bench_directive_args(b: &mut test::Bencher) {
let f = load_file("directive_args");
b.iter(|| parse_query(&f).unwrap());
b.iter(|| parse_query::<String>(&f).unwrap());
}

#[bench]
fn bench_query_vars(b: &mut test::Bencher) {
let f = load_file("query_vars");
b.iter(|| parse_query(&f).unwrap());
b.iter(|| parse_query::<String>(&f).unwrap());
}

#[bench]
fn bench_kitchen_sink(b: &mut test::Bencher) {
let f = load_file("kitchen-sink");
b.iter(|| parse_query(&f).unwrap());
b.iter(|| parse_query::<String>(&f).unwrap());
}
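
The benchmark updates above show the only caller-side change the generic AST requires: `parse_query` no longer fixes the AST's string type, so call sites that don't otherwise constrain it need a turbofish. A minimal usage sketch (not part of this diff, and assuming `parse_query` keeps its crate-root re-export):

```rust
use graphql_parser::parse_query;

fn main() {
    let source = "query Q { user { id name } }";

    // Owned AST: names are `String`s, so the document can outlive `source`.
    let owned = parse_query::<String>(source).unwrap();

    // Borrowed AST: names are `&str` slices into `source` (zero-copy).
    let borrowed = parse_query::<&str>(source).unwrap();

    // Both render the same canonical form.
    assert_eq!(owned.to_string(), borrowed.to_string());
}
```

The benchmarks pick `String`, presumably to keep measuring the same owned-string behaviour as before this change.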
115 changes: 70 additions & 45 deletions src/common.rs
@@ -1,4 +1,4 @@
use std::collections::BTreeMap;
use std::{fmt, collections::BTreeMap};

use combine::{parser, ParseResult, Parser};
use combine::easy::Error;
@@ -9,15 +9,29 @@ use tokenizer::{Kind as T, Token, TokenStream};
use helpers::{punct, ident, kind, name};
use position::Pos;

/// Text abstracts over types that hold a string value.
/// It is used to make the AST generic over the string type.
pub trait Text<'a>: 'a {
type Value: 'a + From<&'a str> + AsRef<str> + std::borrow::Borrow<str> + PartialEq + Eq + PartialOrd + Ord + fmt::Debug + Clone;
}

impl<'a> Text<'a> for &'a str {
type Value = Self;
}

/// An alias for string, used where graphql expects a name
pub type Name = String;
impl<'a> Text<'a> for String {
type Value = String;
}

impl<'a> Text<'a> for std::borrow::Cow<'a, str> {
type Value = Self;
}

#[derive(Debug, Clone, PartialEq)]
pub struct Directive {
pub struct Directive<'a, T: Text<'a>> {
pub position: Pos,
pub name: Name,
pub arguments: Vec<(Name, Value)>,
pub name: T::Value,
pub arguments: Vec<(T::Value, Value<'a, T>)>,
}

/// This represents integer number
@@ -32,23 +46,23 @@ pub struct Directive {
pub struct Number(pub(crate) i64);

#[derive(Debug, Clone, PartialEq)]
pub enum Value {
Variable(Name),
pub enum Value<'a, T: Text<'a>> {
Variable(T::Value),
Int(Number),
Float(f64),
String(String),
Boolean(bool),
Null,
Enum(Name),
List(Vec<Value>),
Object(BTreeMap<Name, Value>),
Enum(T::Value),
List(Vec<Value<'a, T>>),
Object(BTreeMap<T::Value, Value<'a, T>>),
}

#[derive(Debug, Clone, PartialEq)]
pub enum Type {
NamedType(Name),
ListType(Box<Type>),
NonNullType(Box<Type>),
pub enum Type<'a, T: Text<'a>> {
NamedType(T::Value),
ListType(Box<Type<'a, T>>),
NonNullType(Box<Type<'a, T>>),
}

impl Number {
@@ -64,25 +78,27 @@ impl From<i32> for Number {
}
}

pub fn directives<'a>(input: &mut TokenStream<'a>)
-> ParseResult<Vec<Directive>, TokenStream<'a>>
pub fn directives<'a, T>(input: &mut TokenStream<'a>)
-> ParseResult<Vec<Directive<'a, T>>, TokenStream<'a>>
where T: Text<'a>,
{
many(position()
.skip(punct("@"))
.and(name())
.and(name::<'a, T>())
.and(parser(arguments))
.map(|((position, name), arguments)| {
Directive { position, name, arguments }
}))
.parse_stream(input)
}

pub fn arguments<'a>(input: &mut TokenStream<'a>)
-> ParseResult<Vec<(String, Value)>, TokenStream<'a>>
pub fn arguments<'a, T>(input: &mut TokenStream<'a>)
-> ParseResult<Vec<(T::Value, Value<'a, T>)>, TokenStream<'a>>
where T: Text<'a>,
{
optional(
punct("(")
.with(many1(name()
.with(many1(name::<'a, T>()
.skip(punct(":"))
.and(parser(value))))
.skip(punct(")")))
@@ -92,27 +108,29 @@ pub fn arguments<'a>(input: &mut TokenStream<'a>)
.parse_stream(input)
}

pub fn int_value<'a>(input: &mut TokenStream<'a>)
-> ParseResult<Value, TokenStream<'a>>
pub fn int_value<'a, S>(input: &mut TokenStream<'a>)
-> ParseResult<Value<'a, S>, TokenStream<'a>>
where S: Text<'a>
{
kind(T::IntValue).and_then(|tok| tok.value.parse())
.map(Number).map(Value::Int)
.parse_stream(input)
}

pub fn float_value<'a>(input: &mut TokenStream<'a>)
-> ParseResult<Value, TokenStream<'a>>
pub fn float_value<'a, S>(input: &mut TokenStream<'a>)
-> ParseResult<Value<'a, S>, TokenStream<'a>>
where S: Text<'a>
{
kind(T::FloatValue).and_then(|tok| tok.value.parse())
.map(Value::Float)
.parse_stream(input)
}

fn unquote_block_string(src: &str) -> Result<String, Error<Token, Token>> {
fn unquote_block_string<'a>(src: &'a str) -> Result<String, Error<Token<'a>, Token<'a>>> {
debug_assert!(src.starts_with("\"\"\"") && src.ends_with("\"\"\""));
let indent = src[3..src.len()-3].lines().skip(1)
.filter_map(|line| {
let trimmed = line.trim_left().len();
let trimmed = line.trim_start().len();
if trimmed > 0 {
Some(line.len() - trimmed)
} else {
@@ -144,7 +162,8 @@ fn unquote_block_string(src: &str) -> Result<String, Error<Token, Token>> {
Ok(result)
}

fn unquote_string(s: &str) -> Result<String, Error<Token, Token>> {
fn unquote_string<'a>(s: &'a str) -> Result<String, Error<Token, Token>>
{
let mut res = String::with_capacity(s.len());
debug_assert!(s.starts_with('"') && s.ends_with('"'));
let mut chars = s[1..s.len()-1].chars();
@@ -183,67 +202,73 @@ pub fn string<'a>(input: &mut TokenStream<'a>)
)).parse_stream(input)
}

pub fn string_value<'a>(input: &mut TokenStream<'a>)
-> ParseResult<Value, TokenStream<'a>>
pub fn string_value<'a, S>(input: &mut TokenStream<'a>)
-> ParseResult<Value<'a, S>, TokenStream<'a>>
where S: Text<'a>,
{
kind(T::StringValue).and_then(|tok| unquote_string(tok.value))
.map(Value::String)
.parse_stream(input)
}

pub fn block_string_value<'a>(input: &mut TokenStream<'a>)
-> ParseResult<Value, TokenStream<'a>>
pub fn block_string_value<'a, S>(input: &mut TokenStream<'a>)
-> ParseResult<Value<'a, S>, TokenStream<'a>>
where S: Text<'a>,
{
kind(T::BlockString).and_then(|tok| unquote_block_string(tok.value))
.map(Value::String)
.parse_stream(input)
}

pub fn plain_value<'a>(input: &mut TokenStream<'a>)
-> ParseResult<Value, TokenStream<'a>>
pub fn plain_value<'a, T>(input: &mut TokenStream<'a>)
-> ParseResult<Value<'a, T>, TokenStream<'a>>
where T: Text<'a>,
{
ident("true").map(|_| Value::Boolean(true))
.or(ident("false").map(|_| Value::Boolean(false)))
.or(ident("null").map(|_| Value::Null))
.or(name().map(Value::Enum))
.or(name::<'a, T>().map(Value::Enum))
.or(parser(int_value))
.or(parser(float_value))
.or(parser(string_value))
.or(parser(block_string_value))
.parse_stream(input)
}

pub fn value<'a>(input: &mut TokenStream<'a>)
-> ParseResult<Value, TokenStream<'a>>
pub fn value<'a, T>(input: &mut TokenStream<'a>)
-> ParseResult<Value<'a, T>, TokenStream<'a>>
where T: Text<'a>,
{
parser(plain_value)
.or(punct("$").with(name()).map(Value::Variable))
.or(punct("$").with(name::<'a, T>()).map(Value::Variable))
.or(punct("[").with(many(parser(value))).skip(punct("]"))
.map(Value::List))
.or(punct("{")
.with(many(name().skip(punct(":")).and(parser(value))))
.with(many(name::<'a, T>().skip(punct(":")).and(parser(value))))
.skip(punct("}"))
.map(Value::Object))
.parse_stream(input)
}

pub fn default_value<'a>(input: &mut TokenStream<'a>)
-> ParseResult<Value, TokenStream<'a>>
pub fn default_value<'a, T>(input: &mut TokenStream<'a>)
-> ParseResult<Value<'a, T>, TokenStream<'a>>
where T: Text<'a>,
{
parser(plain_value)
.or(punct("[").with(many(parser(default_value))).skip(punct("]"))
.map(Value::List))
.or(punct("{")
.with(many(name().skip(punct(":")).and(parser(default_value))))
.with(many(name::<'a, T>().skip(punct(":")).and(parser(default_value))))
.skip(punct("}"))
.map(Value::Object))
.parse_stream(input)
}

pub fn parse_type<'a>(input: &mut TokenStream<'a>)
-> ParseResult<Type, TokenStream<'a>>
pub fn parse_type<'a, T>(input: &mut TokenStream<'a>)
-> ParseResult<Type<'a, T>, TokenStream<'a>>
where T: Text<'a>,
{
name().map(Type::NamedType)
name::<'a, T>().map(Type::NamedType)
.or(punct("[")
.with(parser(parse_type))
.skip(punct("]"))
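
Since `Text` is an open trait, the AST isn't limited to the three impls above: any string-ish type whose `Value` meets the bounds can back it. A hedged sketch of a downstream impl (the `ArcText` name and the `graphql_parser::query` re-export path are assumptions for illustration, not part of this PR):

```rust
use std::sync::Arc;
use graphql_parser::query::{Text, Value}; // assumed re-export path

// Marker type selecting `Arc<str>` as the AST's text representation.
// `Arc<str>` satisfies every bound on `Text::Value`: `From<&str>`,
// `AsRef<str>`, `Borrow<str>`, the ordering traits, `Debug`, and `Clone`.
#[derive(Debug, Clone, PartialEq)]
struct ArcText;

impl<'a> Text<'a> for ArcText {
    type Value = Arc<str>;
}

fn main() {
    // An AST node whose name is a cheaply clonable, owned `Arc<str>`.
    let v: Value<'static, ArcText> = Value::Enum(Arc::from("RED"));
    println!("{:?}", v);
}
```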
20 changes: 17 additions & 3 deletions src/format.rs
@@ -93,7 +93,7 @@ impl<'a> Formatter<'a> {
for c in s.chars() {
match c {
'\n' => has_newline = true,
'\r' | '\t' | '\u{0020}'...'\u{FFFF}' => {}
'\r' | '\t' | '\u{0020}'..='\u{FFFF}' => {}
_ => has_nonprintable = true,
}
}
@@ -107,7 +107,7 @@ impl<'a> Formatter<'a> {
'\t' => self.write(r"\t"),
'"' => self.write("\\\""),
'\\' => self.write(r"\\"),
'\u{0020}'...'\u{FFFF}' => self.buf.push(c),
'\u{0020}'..='\u{FFFF}' => self.buf.push(c),
_ => write!(&mut self.buf, "\\u{:04}", c as u32).unwrap(),
}
}
@@ -130,7 +130,9 @@ impl<'a> Formatter<'a> {
}
}

pub(crate) fn format_directives(dirs: &[Directive], f: &mut Formatter) {
pub(crate) fn format_directives<'a, T>(dirs: &[Directive<'a, T>], f: &mut Formatter)
where T: ::common::Text<'a>,
{
for dir in dirs {
f.write(" ");
dir.display(f);
@@ -147,4 +149,16 @@ macro_rules! impl_display {
}
)+
};

('a $($typ: ident, )+) => {
$(
impl<'a, T> fmt::Display for $typ<'a, T>
where T: Text<'a>,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(&to_string(self))
}
}
)+
};
}
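
The new `impl_display!` arm is selected by a literal `'a` token at the front of the invocation; what follows is the usual list of type names, but the generated impls carry the lifetime and `Text` parameters. A self-contained sketch of the same dispatch pattern (names here are illustrative, not the crate's actual call sites):

```rust
use std::fmt;
use std::marker::PhantomData;

trait Text<'a>: 'a {
    type Value: 'a + fmt::Debug;
}
impl<'a> Text<'a> for &'a str {
    type Value = &'a str;
}

struct Plain;
struct Generic<'a, T: Text<'a>>(T::Value, PhantomData<&'a ()>);

macro_rules! impl_display {
    // Arm 1: plain (non-generic) types.
    ($($typ:ident, )+) => {
        $(impl fmt::Display for $typ {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                f.write_str("plain")
            }
        })+
    };
    // Arm 2: selected by the leading `'a` token; generic types.
    ('a $($typ:ident, )+) => {
        $(impl<'a, T> fmt::Display for $typ<'a, T>
            where T: Text<'a>,
        {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                f.write_str("generic")
            }
        })+
    };
}

impl_display!(Plain,);
impl_display!('a Generic,);

fn main() {
    println!("{}", Plain);                              // "plain"
    println!("{}", Generic::<&str>("x", PhantomData));  // "generic"
}
```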
24 changes: 16 additions & 8 deletions src/helpers.rs
@@ -7,6 +7,8 @@ use combine::stream::easy::{Error, Errors, Info};
use tokenizer::{TokenStream, Kind, Token};
use position::Pos;

use super::common::{Text};


#[derive(Debug, Clone)]
pub struct TokenMatch<'a> {
@@ -15,8 +17,10 @@ pub struct TokenMatch<'a> {
}

#[derive(Debug, Clone)]
pub struct NameMatch<'a> {
phantom: PhantomData<&'a u8>,
pub struct NameMatch<'a, T>
where T: Text<'a>
{
phantom: PhantomData<(&'a T)>,
}

#[derive(Debug, Clone)]
@@ -34,7 +38,9 @@ pub fn kind<'x>(kind: Kind) -> TokenMatch<'x> {
}
}

pub fn name<'x>() -> NameMatch<'x> {
pub fn name<'a, T>() -> NameMatch<'a, T>
where T: Text<'a>
{
NameMatch {
phantom: PhantomData,
}
@@ -60,15 +66,15 @@ impl<'a> Parser for TokenMatch<'a> {
}
}

pub fn punct<'x>(value: &'static str) -> Value<'x> {
pub fn punct<'s>(value: &'static str) -> Value<'s> {
Value {
kind: Kind::Punctuator,
value: value,
phantom: PhantomData,
}
}

pub fn ident<'x>(value: &'static str) -> Value<'x> {
pub fn ident<'s>(value: &'static str) -> Value<'s> {
Value {
kind: Kind::Name,
value: value,
@@ -97,17 +103,19 @@ impl<'a> Parser for Value<'a> {
}
}

impl<'a> Parser for NameMatch<'a> {
impl<'a, S> Parser for NameMatch<'a, S>
where S: Text<'a>,
{
type Input = TokenStream<'a>;
type Output = String;
type Output = S::Value;
type PartialState = ();

#[inline]
fn parse_lazy(&mut self, input: &mut Self::Input)
-> ConsumedResult<Self::Output, Self::Input>
{
satisfy(|c: Token<'a>| c.kind == Kind::Name)
.map(|t: Token<'a>| t.value.to_string())
.map(|t: Token<'a>| -> S::Value { S::Value::from(t.value) } )
.parse_lazy(input)
}

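
The `NameMatch` change above is where the chosen text type actually takes effect: instead of always allocating with `to_string()`, the parser converts the token's borrowed text through the `From<&'a str>` bound on `Text::Value`. A self-contained sketch of that conversion (the trait here is a stripped-down stand-in for the crate's `common::Text`):

```rust
trait Text<'a>: 'a {
    type Value: 'a + From<&'a str> + AsRef<str>;
}
impl<'a> Text<'a> for &'a str {
    type Value = &'a str;
}
impl<'a> Text<'a> for String {
    type Value = String;
}

// What `parse_lazy` now does with a name token's text: the target type
// decides whether to copy it or keep borrowing from the source.
fn name_to_value<'a, T: Text<'a>>(token_text: &'a str) -> T::Value {
    T::Value::from(token_text)
}

fn main() {
    let owned: String = name_to_value::<String>("User"); // copies
    let borrowed: &str = name_to_value::<&str>("User");  // zero-copy
    assert_eq!(owned, borrowed);
}
```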