diff --git a/src/lib.rs b/src/lib.rs
index 3649bdf..af31856 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -91,19 +91,20 @@
 //!
 #![warn(missing_debug_implementations)]
 
-#[cfg(test)] #[macro_use] extern crate pretty_assertions;
-
+#[cfg(test)]
+#[macro_use]
+extern crate pretty_assertions;
 
 mod common;
 #[macro_use]
 mod format;
-mod position;
-mod tokenizer;
 mod helpers;
+mod position;
 pub mod query;
 pub mod schema;
+mod tokenizer;
 
-pub use crate::query::parse_query;
-pub use crate::schema::parse_schema;
-pub use crate::position::Pos;
 pub use crate::format::Style;
+pub use crate::position::Pos;
+pub use crate::query::{consume_query, parse_query};
+pub use crate::schema::parse_schema;
diff --git a/src/query/ast.rs b/src/query/ast.rs
index 62e2674..1be1c7f 100644
--- a/src/query/ast.rs
+++ b/src/query/ast.rs
@@ -5,8 +5,8 @@
 //!
 //! [graphql grammar]: http://facebook.github.io/graphql/October2016/#sec-Appendix-Grammar-Summary
 //!
+pub use crate::common::{Directive, Number, Text, Type, Value};
 use crate::position::Pos;
-pub use crate::common::{Directive, Number, Value, Text, Type};
 
 /// Root of query data
 #[derive(Debug, Clone, PartialEq)]
@@ -17,9 +17,9 @@ pub struct Document<'a, T: Text<'a>> {
 impl<'a> Document<'a, String> {
     pub fn into_static(self) -> Document<'static, String> {
         // To support both reference and owned values in the AST,
-        // all string data is represented with the ::common::Str<'a, T: Text<'a>> 
+        // all string data is represented with the ::common::Str<'a, T: Text<'a>>
         // wrapper type.
-        // This type must carry the liftetime of the query string,
+        // This type must carry the lifetime of the query string,
         // and is stored in a PhantomData value on the Str type.
         // When using owned String types, the actual lifetime of
         // the Ast nodes is 'static, since no references are kept,
diff --git a/src/query/grammar.rs b/src/query/grammar.rs
index 08c69ce..6dc01eb 100644
--- a/src/query/grammar.rs
+++ b/src/query/grammar.rs
@@ -1,16 +1,16 @@
+use combine::combinator::{eof, many1, optional, position};
 use combine::{parser, ParseResult, Parser};
-use combine::combinator::{many1, eof, optional, position};
 
-use crate::common::{Directive};
-use crate::common::{directives, arguments, default_value, parse_type};
-use crate::tokenizer::{TokenStream};
-use crate::helpers::{punct, ident, name};
-use crate::query::error::{ParseError};
+use crate::common::Directive;
+use crate::common::{arguments, default_value, directives, parse_type};
+use crate::helpers::{ident, name, punct};
 use crate::query::ast::*;
+use crate::query::error::ParseError;
+use crate::tokenizer::TokenStream;
 
-pub fn field<'a, S>(input: &mut TokenStream<'a>)
-    -> ParseResult<Field<'a, S>, TokenStream<'a>>
-    where S: Text<'a>
+pub fn field<'a, S>(input: &mut TokenStream<'a>) -> ParseResult<Field<'a, S>, TokenStream<'a>>
+where
+    S: Text<'a>,
 {
     (
         position(),
@@ -19,74 +19,101 @@ pub fn field<'a, S>(input: &mut TokenStream<'a>)
         parser(arguments),
         parser(directives),
        optional(parser(selection_set)),
-    ).map(|(position, name_or_alias, opt_name, arguments, directives, sel)| {
-        let (name, alias) = match opt_name {
-            Some(name) => (name, Some(name_or_alias)),
-            None => (name_or_alias, None),
-        };
-        Field {
-            position, name, alias, arguments, directives,
-            selection_set: sel.unwrap_or_else(|| {
-                SelectionSet {
-                    span: (position, position),
-                    items: Vec::new(),
+    )
+    .map(
+        |(position, name_or_alias, opt_name, arguments, directives, sel)| {
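+            // GraphQL allows an optional alias before a field (`alias: field`);
+            // if a second name follows the first one, the first was the alias.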
+            let (name, alias) = match opt_name {
+                Some(name) => (name, Some(name_or_alias)),
+                None => (name_or_alias, None),
+            };
+            Field {
+                position,
+                name,
+                alias,
+                arguments,
+                directives,
+                selection_set: sel.unwrap_or_else(|| SelectionSet {
+                    span: (position, position),
+                    items: Vec::new(),
+                }),
+            }
-            }),
-        }
-    })
-    .parse_stream(input)
+        },
+    )
+    .parse_stream(input)
 }
 
-pub fn selection<'a, S>(input: &mut TokenStream<'a>)
-    -> ParseResult<Selection<'a, S>, TokenStream<'a>>
-    where S: Text<'a>
+pub fn selection<'a, S>(
+    input: &mut TokenStream<'a>,
+) -> ParseResult<Selection<'a, S>, TokenStream<'a>>
+where
+    S: Text<'a>,
 {
-    parser(field).map(Selection::Field)
-    .or(punct("...").with((
+    parser(field)
+        .map(Selection::Field)
+        .or(punct("...").with(
+            (
                 position(),
                 optional(ident("on").with(name::<'a, S>()).map(TypeCondition::On)),
                 parser(directives),
                 parser(selection_set),
-            ).map(|(position, type_condition, directives, selection_set)| {
-                InlineFragment { position, type_condition,
-                                 selection_set, directives }
-            })
-            .map(Selection::InlineFragment)
-            .or((position(),
-                 name::<'a, S>(),
-                 parser(directives),
-                ).map(|(position, fragment_name, directives)| {
-                    FragmentSpread { position, fragment_name, directives }
-                })
-                .map(Selection::FragmentSpread))
-    ))
-    .parse_stream(input)
+            )
+                .map(
+                    |(position, type_condition, directives, selection_set)| InlineFragment {
+                        position,
+                        type_condition,
+                        selection_set,
+                        directives,
+                    },
+                )
+                .map(Selection::InlineFragment)
+                .or((position(), name::<'a, S>(), parser(directives))
+                    .map(|(position, fragment_name, directives)| FragmentSpread {
+                        position,
+                        fragment_name,
+                        directives,
+                    })
+                    .map(Selection::FragmentSpread)),
+        ))
+        .parse_stream(input)
 }
 
-pub fn selection_set<'a, S>(input: &mut TokenStream<'a>)
-    -> ParseResult<SelectionSet<'a, S>, TokenStream<'a>>
-    where S: Text<'a>,
+pub fn selection_set<'a, S>(
+    input: &mut TokenStream<'a>,
+) -> ParseResult<SelectionSet<'a, S>, TokenStream<'a>>
+where
+    S: Text<'a>,
 {
     (
         position().skip(punct("{")),
         many1(parser(selection)),
         position().skip(punct("}")),
-    ).map(|(start, items, end)| SelectionSet { span: (start, end), items })
-    .parse_stream(input)
+    )
+    .map(|(start, items, end)| SelectionSet {
+        span: (start, end),
+        items,
+    })
+    .parse_stream(input)
 }
 
-pub fn query<'a, T: Text<'a>>(input: &mut TokenStream<'a>)
-    -> ParseResult<Query<'a, T>, TokenStream<'a>>
-    where T: Text<'a>,
+pub fn query<'a, T: Text<'a>>(
+    input: &mut TokenStream<'a>,
+) -> ParseResult<Query<'a, T>, TokenStream<'a>>
+where
+    T: Text<'a>,
 {
     position()
-    .skip(ident("query"))
-    .and(parser(operation_common))
-    .map(|(position, (name, variable_definitions, directives, selection_set))|
-        Query {
-            position, name, selection_set, variable_definitions, directives,
-        })
-    .parse_stream(input)
+        .skip(ident("query"))
+        .and(parser(operation_common))
+        .map(
+            |(position, (name, variable_definitions, directives, selection_set))| Query {
+                position,
+                name,
+                selection_set,
+                variable_definitions,
+                directives,
+            },
+        )
+        .parse_stream(input)
 }
 
 /// A set of attributes common to a Query and a Mutation
@@ -98,103 +125,139 @@ type OperationCommon<'a, T: Text<'a>> = (
     SelectionSet<'a, T>,
 );
 
-pub fn operation_common<'a, T: Text<'a>>(input: &mut TokenStream<'a>)
-    -> ParseResult<OperationCommon<'a, T>, TokenStream<'a>>
-    where T: Text<'a>,
+pub fn operation_common<'a, T: Text<'a>>(
+    input: &mut TokenStream<'a>,
+) -> ParseResult<OperationCommon<'a, T>, TokenStream<'a>>
+where
+    T: Text<'a>,
 {
     optional(name::<'a, T>())
-    .and(optional(
-        punct("(")
-        .with(many1(
-            (
-                position(),
-                punct("$").with(name::<'a, T>()).skip(punct(":")),
-                parser(parse_type),
-                optional(
-                    punct("=")
-                    .with(parser(default_value))),
-            ).map(|(position, name, var_type, default_value)| {
-                VariableDefinition {
-                    position,
-                    name, var_type, default_value,
-                }
-            })))
-        .skip(punct(")")))
-        .map(|vars| vars.unwrap_or_else(Vec::new)))
-    .and(parser(directives))
-    .and(parser(selection_set))
-    .map(|(((a, b), c), d)| (a, b, c, d))
-    .parse_stream(input)
+        .and(
+            optional(
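+                // Variable definitions: a parenthesized list of `$name: Type`
+                // pairs, each with an optional `= default` value.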
+                punct("(")
+                    .with(many1(
+                        (
+                            position(),
+                            punct("$").with(name::<'a, T>()).skip(punct(":")),
+                            parser(parse_type),
+                            optional(punct("=").with(parser(default_value))),
+                        )
+                            .map(
+                                |(position, name, var_type, default_value)| VariableDefinition {
+                                    position,
+                                    name,
+                                    var_type,
+                                    default_value,
+                                },
+                            ),
+                    ))
+                    .skip(punct(")")),
+            )
+            .map(|vars| vars.unwrap_or_else(Vec::new)),
+        )
+        .and(parser(directives))
+        .and(parser(selection_set))
+        .map(|(((a, b), c), d)| (a, b, c, d))
+        .parse_stream(input)
 }
 
-pub fn mutation<'a, T: Text<'a>>(input: &mut TokenStream<'a>)
-    -> ParseResult<Mutation<'a, T>, TokenStream<'a>>
-    where T: Text<'a>,
+pub fn mutation<'a, T: Text<'a>>(
+    input: &mut TokenStream<'a>,
+) -> ParseResult<Mutation<'a, T>, TokenStream<'a>>
+where
+    T: Text<'a>,
 {
     position()
-    .skip(ident("mutation"))
-    .and(parser(operation_common))
-    .map(|(position, (name, variable_definitions, directives, selection_set))|
-        Mutation {
-            position, name, selection_set, variable_definitions, directives,
-        })
-    .parse_stream(input)
+        .skip(ident("mutation"))
+        .and(parser(operation_common))
+        .map(
+            |(position, (name, variable_definitions, directives, selection_set))| Mutation {
+                position,
+                name,
+                selection_set,
+                variable_definitions,
+                directives,
+            },
+        )
+        .parse_stream(input)
 }
 
-pub fn subscription<'a, T: Text<'a>>(input: &mut TokenStream<'a>)
-    -> ParseResult<Subscription<'a, T>, TokenStream<'a>>
-    where T: Text<'a>,
+pub fn subscription<'a, T: Text<'a>>(
+    input: &mut TokenStream<'a>,
+) -> ParseResult<Subscription<'a, T>, TokenStream<'a>>
+where
+    T: Text<'a>,
 {
     position()
-    .skip(ident("subscription"))
-    .and(parser(operation_common))
-    .map(|(position, (name, variable_definitions, directives, selection_set))|
-        Subscription {
-            position, name, selection_set, variable_definitions, directives,
-        })
-    .parse_stream(input)
+        .skip(ident("subscription"))
+        .and(parser(operation_common))
+        .map(
+            |(position, (name, variable_definitions, directives, selection_set))| Subscription {
+                position,
+                name,
+                selection_set,
+                variable_definitions,
+                directives,
+            },
+        )
+        .parse_stream(input)
 }
 
-pub fn operation_definition<'a, S>(input: &mut TokenStream<'a>)
-    -> ParseResult<OperationDefinition<'a, S>, TokenStream<'a>>
-    where S: Text<'a>,
+pub fn operation_definition<'a, S>(
+    input: &mut TokenStream<'a>,
+) -> ParseResult<OperationDefinition<'a, S>, TokenStream<'a>>
+where
+    S: Text<'a>,
 {
-    parser(selection_set).map(OperationDefinition::SelectionSet)
-    .or(parser(query).map(OperationDefinition::Query))
-    .or(parser(mutation).map(OperationDefinition::Mutation))
-    .or(parser(subscription).map(OperationDefinition::Subscription))
-    .parse_stream(input)
+    parser(selection_set)
+        .map(OperationDefinition::SelectionSet)
+        .or(parser(query).map(OperationDefinition::Query))
+        .or(parser(mutation).map(OperationDefinition::Mutation))
+        .or(parser(subscription).map(OperationDefinition::Subscription))
+        .parse_stream(input)
 }
 
-pub fn fragment_definition<'a, T: Text<'a>>(input: &mut TokenStream<'a>)
-    -> ParseResult<FragmentDefinition<'a, T>, TokenStream<'a>>
-    where T: Text<'a>,
+pub fn fragment_definition<'a, T: Text<'a>>(
+    input: &mut TokenStream<'a>,
+) -> ParseResult<FragmentDefinition<'a, T>, TokenStream<'a>>
+where
+    T: Text<'a>,
 {
     (
         position().skip(ident("fragment")),
         name::<'a, T>(),
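+        // `fragment Name on Type`: the type condition after `on` is
+        // required for fragment definitions, unlike on inline fragments.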
         ident("on").with(name::<'a, T>()).map(TypeCondition::On),
         parser(directives),
-        parser(selection_set)
-    ).map(|(position, name, type_condition, directives, selection_set)| {
-        FragmentDefinition {
-            position, name, type_condition, directives, selection_set,
-        }
-    })
-    .parse_stream(input)
+        parser(selection_set),
+    )
+    .map(
+        |(position, name, type_condition, directives, selection_set)| FragmentDefinition {
+            position,
+            name,
+            type_condition,
+            directives,
+            selection_set,
+        },
+    )
+    .parse_stream(input)
 }
 
-pub fn definition<'a, S>(input: &mut TokenStream<'a>)
-    -> ParseResult<Definition<'a, S>, TokenStream<'a>>
-    where S: Text<'a>,
+pub fn definition<'a, S>(
+    input: &mut TokenStream<'a>,
+) -> ParseResult<Definition<'a, S>, TokenStream<'a>>
+where
+    S: Text<'a>,
 {
-    parser(operation_definition).map(Definition::Operation)
-    .or(parser(fragment_definition).map(Definition::Fragment))
-    .parse_stream(input)
+    parser(operation_definition)
+        .map(Definition::Operation)
+        .or(parser(fragment_definition).map(Definition::Fragment))
+        .parse_stream(input)
 }
 
 /// Parses a piece of query language and returns an AST
-pub fn parse_query<'a, S>(s: &'a str) -> Result<Document<'a, S>, ParseError>
-    where S: Text<'a>,
+pub fn parse_query<'a, S>(s: &'a str) -> Result<Document<'a, S>, ParseError>
+where
+    S: Text<'a>,
 {
     let mut tokens = TokenStream::new(s);
     let (doc, _) = many1(parser(definition))
@@ -206,11 +269,23 @@ pub fn parse_query<'a, S>(s: &'a str) -> Result<Document<'a, S>, ParseError>
     Ok(doc)
 }
 
+/// Parses a single ExecutableDefinition and returns an AST as well as the remainder of the input
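+///
+/// A sketch of the intended usage, mirroring the tests below (`String` is
+/// one possible `Text` implementation to parameterize the AST with):
+///
+/// ```ignore
+/// let (definition, remainder) =
+///     consume_query::<String>("query { a } query { b }").unwrap();
+/// assert_eq!(remainder, "query { b }");
+/// ```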
+pub fn consume_query<'a, S>(s: &'a str) -> Result<(Definition<'a, S>, &'a str), ParseError>
+where
+    S: Text<'a>,
+{
+    let tokens = TokenStream::new(s);
+
+    let (doc, tokens) = parser(definition).parse(tokens)?;
+
+    Ok((doc, &s[tokens.offset()..]))
+}
+
 #[cfg(test)]
 mod test {
+    use super::{consume_query, parse_query};
     use crate::position::Pos;
     use crate::query::grammar::*;
-    use super::parse_query;
 
     fn ast(s: &str) -> Document<String> {
         parse_query::<String>(&s).unwrap().to_owned()
@@ -218,66 +293,62 @@ mod test {
     }
 
     #[test]
     fn one_field() {
-        assert_eq!(ast("{ a }"), Document {
-            definitions: vec![
-                Definition::Operation(OperationDefinition::SelectionSet(
+        assert_eq!(
+            ast("{ a }"),
+            Document {
+                definitions: vec![Definition::Operation(OperationDefinition::SelectionSet(
                     SelectionSet {
-                        span: (Pos { line: 1, column: 1 },
-                               Pos { line: 1, column: 5 }),
-                        items: vec![
-                            Selection::Field(Field {
-                                position: Pos { line: 1, column: 3 },
-                                alias: None,
-                                name: "a".into(),
-                                arguments: Vec::new(),
-                                directives: Vec::new(),
-                                selection_set: SelectionSet {
-                                    span: (Pos { line: 1, column: 3 },
-                                           Pos { line: 1, column: 3 }),
-                                    items: Vec::new()
-                                },
-                            }),
-                        ],
+                        span: (Pos { line: 1, column: 1 }, Pos { line: 1, column: 5 }),
+                        items: vec![Selection::Field(Field {
+                            position: Pos { line: 1, column: 3 },
+                            alias: None,
+                            name: "a".into(),
+                            arguments: Vec::new(),
+                            directives: Vec::new(),
+                            selection_set: SelectionSet {
+                                span: (Pos { line: 1, column: 3 }, Pos { line: 1, column: 3 }),
+                                items: Vec::new()
+                            },
+                        }),],
                     }
-                ))
-            ],
-        });
+                ))],
+            }
+        );
     }
 
     #[test]
     fn builtin_values() {
-        assert_eq!(ast("{ a(t: true, f: false, n: null) }"),
+        assert_eq!(
+            ast("{ a(t: true, f: false, n: null) }"),
             Document {
-                definitions: vec![
-                    Definition::Operation(OperationDefinition::SelectionSet(
-                        SelectionSet {
-                            span: (Pos { line: 1, column: 1 },
-                                   Pos { line: 1, column: 33 }),
-                            items: vec![
-                                Selection::Field(Field {
-                                    position: Pos { line: 1, column: 3 },
-                                    alias: None,
-                                    name: "a".into(),
-                                    arguments: vec![
-                                        ("t".into(),
-                                         Value::Boolean(true)),
-                                        ("f".into(),
-                                         Value::Boolean(false)),
-                                        ("n".into(),
-                                         Value::Null),
-                                    ],
-                                    directives: Vec::new(),
-                                    selection_set: SelectionSet {
-                                        span: (Pos { line: 1, column: 3 },
-                                               Pos { line: 1, column: 3 }),
-                                        items: Vec::new()
-                                    },
-                                }),
+                definitions: vec![Definition::Operation(OperationDefinition::SelectionSet(
+                    SelectionSet {
+                        span: (
+                            Pos { line: 1, column: 1 },
+                            Pos {
+                                line: 1,
+                                column: 33
+                            }
+                        ),
+                        items: vec![Selection::Field(Field {
+                            position: Pos { line: 1, column: 3 },
+                            alias: None,
+                            name: "a".into(),
+                            arguments: vec![
+                                ("t".into(), Value::Boolean(true)),
+                                ("f".into(), Value::Boolean(false)),
+                                ("n".into(), Value::Null),
                             ],
-                        }
-                    ))
-                ],
-            });
+                            directives: Vec::new(),
+                            selection_set: SelectionSet {
+                                span: (Pos { line: 1, column: 3 }, Pos { line: 1, column: 3 }),
+                                items: Vec::new()
+                            },
+                        }),],
                     }
+                ))],
+            }
+        );
     }
 
     #[test]
@@ -286,8 +357,31 @@ mod test {
-    #[should_panic(expected="number too large")]
+    #[should_panic(expected = "number too large")]
     fn large_integer() {
         ast("{ a(x: 10000000000000000000000000000 }");
     }
+
+    #[test]
+    fn consume_single_query() {
+        let (query, remainder) = consume_query::<String>("query { a } query { b }").unwrap();
+        assert!(matches!(query, Definition::Operation(_)));
+        assert_eq!(remainder, "query { b }");
+    }
+
+    #[test]
+    fn consume_single_query_with_not_graphql() {
+        let (query, remainder) =
+            consume_query::<String>("query { a } where a > 1 => 10.0").unwrap();
+        assert!(matches!(query, Definition::Operation(_)));
+        assert_eq!(remainder, "where a > 1 => 10.0");
+    }
+
+    #[test]
+    fn consume_fails_without_operation() {
+        let err = consume_query::<String>("where a > 1 => 10.0")
+            .expect_err("Expected parse to fail with an error");
+        let err = format!("{}", err);
+        assert_eq!(err, "query parse error: Parse error at 1:1\nUnexpected `where[Name]`\nExpected `{`, `query`, `mutation`, `subscription` or `fragment`\n");
+    }
 }
diff --git a/src/query/mod.rs b/src/query/mod.rs
index d1cfcf0..308a512 100644
--- a/src/query/mod.rs
+++ b/src/query/mod.rs
@@ -5,7 +5,6 @@
 mod error;
 mod format;
 mod grammar;
-
-pub use self::grammar::parse_query;
-pub use self::error::ParseError;
 pub use self::ast::*;
+pub use self::error::ParseError;
+pub use self::grammar::{consume_query, parse_query};
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 72cea08..78d951d 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -1,13 +1,12 @@
 use std::fmt;
 
-use combine::{StreamOnce, Positioned};
-use combine::error::{StreamError};
-use combine::stream::{Resetable};
 use combine::easy::{Error, Errors};
+use combine::error::StreamError;
+use combine::stream::Resetable;
+use combine::{Positioned, StreamOnce};
 
 use crate::position::Pos;
 
-
 #[derive(Debug, PartialEq, Eq, Clone, Copy)]
 pub enum Kind {
     Punctuator,
@@ -24,12 +23,21 @@ pub struct Token<'a> {
     pub value: &'a str,
 }
 
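+// Maximum nesting depth of brackets (`(`, `[`, `{`) the tokenizer accepts
+// before erroring; a guard against pathologically nested input.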
+const RECURSION_LIMIT: isize = 20;
+
 #[derive(Debug, PartialEq)]
 pub struct TokenStream<'a> {
     buf: &'a str,
     position: Pos,
     off: usize,
     next_state: Option<(usize, Token<'a>, usize, Pos)>,
+    recursion: isize,
+}
+
+impl TokenStream<'_> {
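+    /// Byte offset of the current token in the source string;
+    /// `consume_query` uses this to return the unparsed remainder.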
+    pub(crate) fn offset(&self) -> usize {
+        self.off
+    }
 }
 
 #[derive(Clone, Debug, PartialEq)]
@@ -54,7 +62,7 @@ impl<'a> StreamOnce for TokenStream<'a> {
         }
         let old_pos = self.off;
         let (kind, len) = self.peek_token()?;
-        let value = &self.buf[self.off-len..self.off];
+        let value = &self.buf[self.off - len..self.off];
         self.skip_whitespace();
         let token = Token { kind, value };
         self.next_state = Some((old_pos, token, self.off, self.position));
@@ -85,9 +93,12 @@ impl<'a> Resetable for TokenStream<'a> {
 
 // NOTE: we expect that first character is always digit or minus, as returned
 // by tokenizer
 fn check_int(value: &str) -> bool {
-    value == "0" || value == "-0" ||
-        (!value.starts_with('0') && value != "-" && !value.starts_with("-0")
-         && value[1..].chars().all(|x| x >= '0' && x <= '9'))
+    value == "0"
+        || value == "-0"
+        || (!value.starts_with('0')
+            && value != "-"
+            && !value.starts_with("-0")
+            && value[1..].chars().all(|x| x >= '0' && x <= '9'))
 }
 
 fn check_dec(value: &str) -> bool {
@@ -106,19 +117,14 @@ fn check_exp(value: &str) -> bool {
     value[1..].chars().all(|x| x >= '0' && x <= '9')
 }
 
-fn check_float(value: &str, exponent: Option<usize>, real: Option<usize>)
-    -> bool
-{
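+// `real` and `exponent` are the byte positions of '.' and 'e'/'E' within
+// `value` (e.g. "1.25e4" has real = Some(1), exponent = Some(4)); the
+// slices around them are validated as integer, decimal and exponent parts.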
] + ); } // TODO(tailhook) fix errors in parser and check error message - #[test] #[should_panic] fn zero_int() { tok_str("01"); } - #[test] #[should_panic] fn zero_int4() { tok_str("00001"); } - #[test] #[should_panic] fn minus_int() { tok_str("-"); } - #[test] #[should_panic] fn minus_zero_int() { tok_str("-01"); } - #[test] #[should_panic] fn minus_zero_int4() { tok_str("-00001"); } - #[test] #[should_panic] fn letters_int() { tok_str("0bbc"); } + #[test] + #[should_panic] + fn zero_int() { + tok_str("01"); + } + #[test] + #[should_panic] + fn zero_int4() { + tok_str("00001"); + } + #[test] + #[should_panic] + fn minus_int() { + tok_str("-"); + } + #[test] + #[should_panic] + fn minus_zero_int() { + tok_str("-01"); + } + #[test] + #[should_panic] + fn minus_zero_int4() { + tok_str("-00001"); + } + #[test] + #[should_panic] + fn letters_int() { + tok_str("0bbc"); + } #[test] fn float() { @@ -463,29 +498,87 @@ mod test { assert_eq!(tok_typ("-132e+0"), [FloatValue]); assert_eq!(tok_str("132e+0"), ["132e+0"]); assert_eq!(tok_typ("132e+0"), [FloatValue]); - assert_eq!(tok_str("a(x: 10.0) { b }"), - ["a", "(", "x", ":", "10.0", ")", "{", "b", "}"]); - assert_eq!(tok_typ("a(x: 10.0) { b }"), - [Name, Punctuator, Name, Punctuator, FloatValue, Punctuator, - Punctuator, Name, Punctuator]); + assert_eq!( + tok_str("a(x: 10.0) { b }"), + ["a", "(", "x", ":", "10.0", ")", "{", "b", "}"] + ); + assert_eq!( + tok_typ("a(x: 10.0) { b }"), + [ + Name, Punctuator, Name, Punctuator, FloatValue, Punctuator, Punctuator, Name, + Punctuator + ] + ); assert_eq!(tok_str("1.23e4"), ["1.23e4"]); assert_eq!(tok_typ("1.23e4"), [FloatValue]); } // TODO(tailhook) fix errors in parser and check error message - #[test] #[should_panic] fn no_int_float() { tok_str(".0"); } - #[test] #[should_panic] fn no_int_float1() { tok_str(".1"); } - #[test] #[should_panic] fn zero_float() { tok_str("01.0"); } - #[test] #[should_panic] fn zero_float4() { tok_str("00001.0"); } - #[test] #[should_panic] fn minus_float() { tok_str("-.0"); } - #[test] #[should_panic] fn minus_zero_float() { tok_str("-01.0"); } - #[test] #[should_panic] fn minus_zero_float4() { tok_str("-00001.0"); } - #[test] #[should_panic] fn letters_float() { tok_str("0bbc.0"); } - #[test] #[should_panic] fn letters_float2() { tok_str("0.bbc"); } - #[test] #[should_panic] fn letters_float3() { tok_str("0.bbce0"); } - #[test] #[should_panic] fn no_exp_sign_float() { tok_str("0e0"); } - #[test] #[should_panic] fn unterminated_string() { tok_str(r#""hello\""#); } - #[test] #[should_panic] fn extra_unterminated_string() { tok_str(r#""hello\\\""#); } + #[test] + #[should_panic] + fn no_int_float() { + tok_str(".0"); + } + #[test] + #[should_panic] + fn no_int_float1() { + tok_str(".1"); + } + #[test] + #[should_panic] + fn zero_float() { + tok_str("01.0"); + } + #[test] + #[should_panic] + fn zero_float4() { + tok_str("00001.0"); + } + #[test] + #[should_panic] + fn minus_float() { + tok_str("-.0"); + } + #[test] + #[should_panic] + fn minus_zero_float() { + tok_str("-01.0"); + } + #[test] + #[should_panic] + fn minus_zero_float4() { + tok_str("-00001.0"); + } + #[test] + #[should_panic] + fn letters_float() { + tok_str("0bbc.0"); + } + #[test] + #[should_panic] + fn letters_float2() { + tok_str("0.bbc"); + } + #[test] + #[should_panic] + fn letters_float3() { + tok_str("0.bbce0"); + } + #[test] + #[should_panic] + fn no_exp_sign_float() { + tok_str("0e0"); + } + #[test] + #[should_panic] + fn unterminated_string() { + tok_str(r#""hello\""#); + } + #[test] + 
+                        if self.recursion == RECURSION_LIMIT {
+                            return Err(Error::message_static_message("Recursion limit exceeded"));
+                        }
+                        self.recursion += 1;
+                    }
+                    ')' | ']' | '}' => {
+                        // Note that this can go below 0.
+                        // We don't report that as an error here, but
+                        // it will be caught by the parser.
+                        self.recursion -= 1;
+                    }
+                    _ => {}
+                }
+
                 self.position.column += 1;
                 self.off += 1;
 
@@ -160,12 +180,11 @@ impl<'a> TokenStream<'a> {
                     Ok((Punctuator, 3))
                 } else {
-                    Err(
-                        Error::unexpected_message(
-                            format_args!("bare dot {:?} is not supported, \
-                                          only \"...\"", cur_char)
-                        )
-                    )
+                    Err(Error::unexpected_message(format_args!(
+                        "bare dot {:?} is not supported, \
+                         only \"...\"",
+                        cur_char
+                    )))
                 }
             }
             '_' | 'a'..='z' | 'A'..='Z' => {
@@ -195,24 +214,21 @@ impl<'a> TokenStream<'a> {
                     };
                     match cur_char {
                         // just scan for now, will validate later on
-                        ' ' | '\n' | '\r' | '\t' | ',' | '#' |
-                        '!' | '$' | ':' | '=' | '@' | '|' | '&' |
-                        '(' | ')' | '[' | ']' | '{' | '}'
-                        => break idx,
+                        ' ' | '\n' | '\r' | '\t' | ',' | '#' | '!' | '$' | ':' | '=' | '@'
+                        | '|' | '&' | '(' | ')' | '[' | ']' | '{' | '}' => break idx,
                         '.' => real = Some(idx),
                         'e' | 'E' => exponent = Some(idx),
-                        _ => {},
+                        _ => {}
                     }
                 };
 
                 if exponent.is_some() || real.is_some() {
                     let value = &self.buf[self.off..][..len];
                     if !check_float(value, exponent, real) {
-                        return Err(
-                            Error::unexpected_message(
-                                format_args!("unsupported float {:?}", value)
-                            )
-                        );
+                        return Err(Error::unexpected_message(format_args!(
+                            "unsupported float {:?}",
+                            value
+                        )));
                     }
                     self.position.column += len;
                     self.off += len;
@@ -221,11 +237,10 @@ impl<'a> TokenStream<'a> {
                 } else {
                     let value = &self.buf[self.off..][..len];
                     if !check_int(value) {
-                        return Err(
-                            Error::unexpected_message(
-                                format_args!("unsupported integer {:?}", value)
-                            )
-                        );
+                        return Err(Error::unexpected_message(format_args!(
+                            "unsupported integer {:?}",
+                            value
+                        )));
                     }
                     self.position.column += len;
                     self.off += len;
@@ -243,11 +258,7 @@ impl<'a> TokenStream<'a> {
                         }
                     }
 
-                    Err(
-                        Error::unexpected_message(
-                            "unterminated block string value"
-                        )
-                    )
+                    Err(Error::unexpected_message("unterminated block string value"))
                 } else {
                     let mut nchars = 1;
                     let mut escaped = false;
@@ -257,37 +268,26 @@ impl<'a> TokenStream<'a> {
                             '"' if escaped => {}
                             '"' => {
                                 self.position.column += nchars;
-                                self.off += idx+1;
-                                return Ok((StringValue, idx+1));
+                                self.off += idx + 1;
+                                return Ok((StringValue, idx + 1));
                             }
                             '\n' => {
-                                return Err(
-                                    Error::unexpected_message(
-                                        "unterminated string value"
-                                    )
-                                );
+                                return Err(Error::unexpected_message("unterminated string value"));
                             }
-                            _ => {
-
-                            }
+                            _ => {}
                         }
                         // if we aren't escaped and the current char is a \, we are now escaped
                         escaped = !escaped && cur_char == '\\';
                     }
-                    Err(
-                        Error::unexpected_message(
-                            "unterminated string value"
-                        )
-                    )
+                    Err(Error::unexpected_message("unterminated string value"))
                 }
             }
-            _ => Err(
-                Error::unexpected_message(
-                    format_args!("unexpected character {:?}", cur_char)
-                )
-            ),
+            _ => Err(Error::unexpected_message(format_args!(
+                "unexpected character {:?}",
+                cur_char
+            ))),
         }
     }
 
@@ -334,7 +334,7 @@ impl<'a> TokenStream<'a> {
         let lines = val.as_bytes().iter().filter(|&&x| x == b'\n').count();
         self.position.line += lines;
         if lines > 0 {
-            let line_offset = val.rfind('\n').unwrap()+1;
+            let line_offset = val.rfind('\n').unwrap() + 1;
             let num = val[line_offset..].chars().count();
             self.position.column = num + 1;
         } else {
@@ -352,11 +352,11 @@ impl<'a> fmt::Display for Token<'a> {
 
 #[cfg(test)]
 mod test {
-    use super::{Kind, TokenStream};
     use super::Kind::*;
+    use super::{Kind, TokenStream};
 
     use combine::easy::Error;
-    use combine::{StreamOnce, Positioned};
+    use combine::{Positioned, StreamOnce};
 
     fn tok_str(s: &str) -> Vec<&str> {
         let mut r = Vec::new();
@@ -398,9 +398,14 @@ mod test {
 
     #[test]
     fn query() {
-        assert_eq!(tok_str("query Query {
+        assert_eq!(
+            tok_str(
+                "query Query {
             object { field }
-        }"), ["query", "Query", "{", "object", "{", "field", "}", "}"]);
+        }"
+            ),
+            ["query", "Query", "{", "object", "{", "field", "}", "}"]
+        );
     }
 
@@ -422,20 +427,50 @@ mod test {
         assert_eq!(tok_typ("-132"), [IntValue]);
         assert_eq!(tok_str("132"), ["132"]);
         assert_eq!(tok_typ("132"), [IntValue]);
-        assert_eq!(tok_str("a(x: 10) { b }"),
-            ["a", "(", "x", ":", "10", ")", "{", "b", "}"]);
-        assert_eq!(tok_typ("a(x: 10) { b }"),
-            [Name, Punctuator, Name, Punctuator, IntValue, Punctuator,
-             Punctuator, Name, Punctuator]);
+        assert_eq!(
+            tok_str("a(x: 10) { b }"),
+            ["a", "(", "x", ":", "10", ")", "{", "b", "}"]
+        );
+        assert_eq!(
+            tok_typ("a(x: 10) { b }"),
+            [
+                Name, Punctuator, Name, Punctuator, IntValue, Punctuator, Punctuator, Name,
+                Punctuator
+            ]
+        );
     }
 
     // TODO(tailhook) fix errors in parser and check error message
-    #[test] #[should_panic] fn zero_int() { tok_str("01"); }
-    #[test] #[should_panic] fn zero_int4() { tok_str("00001"); }
-    #[test] #[should_panic] fn minus_int() { tok_str("-"); }
-    #[test] #[should_panic] fn minus_zero_int() { tok_str("-01"); }
-    #[test] #[should_panic] fn minus_zero_int4() { tok_str("-00001"); }
-    #[test] #[should_panic] fn letters_int() { tok_str("0bbc"); }
+    #[test]
+    #[should_panic]
+    fn zero_int() {
+        tok_str("01");
+    }
+    #[test]
+    #[should_panic]
+    fn zero_int4() {
+        tok_str("00001");
+    }
+    #[test]
+    #[should_panic]
+    fn minus_int() {
+        tok_str("-");
+    }
+    #[test]
+    #[should_panic]
+    fn minus_zero_int() {
+        tok_str("-01");
+    }
+    #[test]
+    #[should_panic]
+    fn minus_zero_int4() {
+        tok_str("-00001");
+    }
+    #[test]
+    #[should_panic]
+    fn letters_int() {
+        tok_str("0bbc");
+    }
 
     #[test]
     fn float() {
@@ -463,29 +498,87 @@ mod test {
         assert_eq!(tok_typ("-132e+0"), [FloatValue]);
         assert_eq!(tok_str("132e+0"), ["132e+0"]);
         assert_eq!(tok_typ("132e+0"), [FloatValue]);
-        assert_eq!(tok_str("a(x: 10.0) { b }"),
-            ["a", "(", "x", ":", "10.0", ")", "{", "b", "}"]);
-        assert_eq!(tok_typ("a(x: 10.0) { b }"),
-            [Name, Punctuator, Name, Punctuator, FloatValue, Punctuator,
-             Punctuator, Name, Punctuator]);
+        assert_eq!(
+            tok_str("a(x: 10.0) { b }"),
+            ["a", "(", "x", ":", "10.0", ")", "{", "b", "}"]
+        );
+        assert_eq!(
+            tok_typ("a(x: 10.0) { b }"),
+            [
+                Name, Punctuator, Name, Punctuator, FloatValue, Punctuator, Punctuator, Name,
+                Punctuator
+            ]
+        );
         assert_eq!(tok_str("1.23e4"), ["1.23e4"]);
         assert_eq!(tok_typ("1.23e4"), [FloatValue]);
     }
 
     // TODO(tailhook) fix errors in parser and check error message
-    #[test] #[should_panic] fn no_int_float() { tok_str(".0"); }
-    #[test] #[should_panic] fn no_int_float1() { tok_str(".1"); }
-    #[test] #[should_panic] fn zero_float() { tok_str("01.0"); }
-    #[test] #[should_panic] fn zero_float4() { tok_str("00001.0"); }
-    #[test] #[should_panic] fn minus_float() { tok_str("-.0"); }
-    #[test] #[should_panic] fn minus_zero_float() { tok_str("-01.0"); }
-    #[test] #[should_panic] fn minus_zero_float4() { tok_str("-00001.0"); }
-    #[test] #[should_panic] fn letters_float() { tok_str("0bbc.0"); }
-    #[test] #[should_panic] fn letters_float2() { tok_str("0.bbc"); }
-    #[test] #[should_panic] fn letters_float3() { tok_str("0.bbce0"); }
-    #[test] #[should_panic] fn no_exp_sign_float() { tok_str("0e0"); }
-    #[test] #[should_panic] fn unterminated_string() { tok_str(r#""hello\""#); }
-    #[test] #[should_panic] fn extra_unterminated_string() { tok_str(r#""hello\\\""#); }
+    #[test]
+    #[should_panic]
+    fn no_int_float() {
+        tok_str(".0");
+    }
+    #[test]
+    #[should_panic]
+    fn no_int_float1() {
+        tok_str(".1");
+    }
+    #[test]
+    #[should_panic]
+    fn zero_float() {
+        tok_str("01.0");
+    }
+    #[test]
+    #[should_panic]
+    fn zero_float4() {
+        tok_str("00001.0");
+    }
+    #[test]
+    #[should_panic]
+    fn minus_float() {
+        tok_str("-.0");
+    }
+    #[test]
+    #[should_panic]
+    fn minus_zero_float() {
+        tok_str("-01.0");
+    }
+    #[test]
+    #[should_panic]
+    fn minus_zero_float4() {
+        tok_str("-00001.0");
+    }
+    #[test]
+    #[should_panic]
+    fn letters_float() {
+        tok_str("0bbc.0");
+    }
+    #[test]
+    #[should_panic]
+    fn letters_float2() {
+        tok_str("0.bbc");
+    }
+    #[test]
+    #[should_panic]
+    fn letters_float3() {
+        tok_str("0.bbce0");
+    }
+    #[test]
+    #[should_panic]
+    fn no_exp_sign_float() {
+        tok_str("0e0");
+    }
+    #[test]
+    #[should_panic]
+    fn unterminated_string() {
+        tok_str(r#""hello\""#);
+    }
+    #[test]
+    #[should_panic]
+    fn extra_unterminated_string() {
+        tok_str(r#""hello\\\""#);
+    }
 
     #[test]
     fn string() {