@@ -333,19 +333,16 @@ def parse_fragment(lexer: Lexer) -> Union[FragmentSpreadNode, InlineFragmentNode
     """
     start = lexer.token
     expect(lexer, TokenKind.SPREAD)
-    if peek(lexer, TokenKind.NAME) and lexer.token.value != "on":
+
+    has_type_condition = skip_keyword(lexer, "on")
+    if not has_type_condition and peek(lexer, TokenKind.NAME):
         return FragmentSpreadNode(
             name=parse_fragment_name(lexer),
             directives=parse_directives(lexer, False),
             loc=loc(lexer, start),
         )
-    if lexer.token.value == "on":
-        lexer.advance()
-        type_condition: Optional[NamedTypeNode] = parse_named_type(lexer)
-    else:
-        type_condition = None
     return InlineFragmentNode(
-        type_condition=type_condition,
+        type_condition=parse_named_type(lexer) if has_type_condition else None,
         directives=parse_directives(lexer, False),
         selection_set=parse_selection_set(lexer),
         loc=loc(lexer, start),
@@ -362,14 +359,14 @@ def parse_fragment_definition(lexer: Lexer) -> FragmentDefinitionNode:
         return FragmentDefinitionNode(
             name=parse_fragment_name(lexer),
             variable_definitions=parse_variable_definitions(lexer),
-            type_condition=expect_keyword(lexer, "on") and parse_named_type(lexer),
+            type_condition=parse_type_condition(lexer),
             directives=parse_directives(lexer, False),
             selection_set=parse_selection_set(lexer),
             loc=loc(lexer, start),
         )
     return FragmentDefinitionNode(
         name=parse_fragment_name(lexer),
-        type_condition=expect_keyword(lexer, "on") and parse_named_type(lexer),
+        type_condition=parse_type_condition(lexer),
         directives=parse_directives(lexer, False),
         selection_set=parse_selection_set(lexer),
         loc=loc(lexer, start),
@@ -389,6 +386,12 @@ def parse_fragment_name(lexer: Lexer) -> NameNode:
     return parse_name(lexer)


+def parse_type_condition(lexer: Lexer) -> NamedTypeNode:
+    """TypeCondition: NamedType"""
+    expect_keyword(lexer, "on")
+    return parse_named_type(lexer)
+
+
 # Implement the parsing rules in the Values section.

@@ -648,8 +651,7 @@ def parse_object_type_definition(lexer: Lexer) -> ObjectTypeDefinitionNode:
 def parse_implements_interfaces(lexer: Lexer) -> List[NamedTypeNode]:
     """ImplementsInterfaces"""
     types: List[NamedTypeNode] = []
-    if lexer.token.value == "implements":
-        lexer.advance()
+    if skip_keyword(lexer, "implements"):
         # optional leading ampersand
         skip(lexer, TokenKind.AMP)
         append = types.append
@@ -1052,13 +1054,13 @@ def peek(lexer: Lexer, kind: TokenKind):
 def skip(lexer: Lexer, kind: TokenKind) -> bool:
     """Conditionally skip the next token.

-    If the next token is of the given kind, return true after advancing the lexer.
-    Otherwise, do not change the parser state and return false.
+    If the next token is of the given kind, return True after advancing the lexer.
+    Otherwise, do not change the parser state and return False.
     """
-    match = lexer.token.kind == kind
-    if match:
+    if lexer.token.kind == kind:
         lexer.advance()
-    return match
+        return True
+    return False


 def expect(lexer: Lexer, kind: TokenKind) -> Token:
@@ -1076,19 +1078,30 @@ def expect(lexer: Lexer, kind: TokenKind) -> Token:
     )


-def expect_keyword(lexer: Lexer, value: str) -> Token:
-    """Check next token for given keyword
+def skip_keyword(lexer: Lexer, value: str) -> bool:
+    """Conditionally skip the next keyword.

-    If the next token is a keyword with the given value, return that token after
-    advancing the lexer. Otherwise, do not change the parser state and return False.
+    If the next token is a keyword with the given value, return True after advancing
+    the lexer. Otherwise, do not change the parser state and return False.
     """
     token = lexer.token
     if token.kind == TokenKind.NAME and token.value == value:
         lexer.advance()
-        return token
-    raise GraphQLSyntaxError(
-        lexer.source, token.start, f"Expected {value!r}, found {token.desc}"
-    )
+        return True
+    return False
+
+
+def expect_keyword(lexer: Lexer, value: str) -> None:
+    """Check next token for given keyword.
+
+    If the next token is a keyword with the given value, advance the lexer. Otherwise,
+    do not change the parser state and throw an error.
+    """
+    if not skip_keyword(lexer, value):
+        token = lexer.token
+        raise GraphQLSyntaxError(
+            lexer.source, token.start, f"Expected {value!r}, found {token.desc}"
+        )


 def unexpected(lexer: Lexer, at_token: Token = None) -> GraphQLError:
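
For orientation only, not part of the patch: a minimal, self-contained sketch of the skip_keyword / expect_keyword contract the diff introduces. The Lexer, Token, and TokenKind classes below are simplified stand-ins for illustration, not the real graphql-core implementations.

# Standalone sketch of the skip/expect keyword pattern (simplified stand-ins).
from enum import Enum
from typing import List, NamedTuple


class TokenKind(Enum):
    NAME = "Name"
    EOF = "<EOF>"


class Token(NamedTuple):
    kind: TokenKind
    value: str


class Lexer:
    """Minimal lexer stand-in that walks a pre-tokenized list."""

    def __init__(self, tokens: List[Token]):
        self._tokens = tokens
        self._pos = 0
        self.token = tokens[0]

    def advance(self) -> None:
        self._pos += 1
        self.token = self._tokens[min(self._pos, len(self._tokens) - 1)]


def skip_keyword(lexer: Lexer, value: str) -> bool:
    """Advance past the keyword and return True, or leave the lexer untouched."""
    token = lexer.token
    if token.kind == TokenKind.NAME and token.value == value:
        lexer.advance()
        return True
    return False


def expect_keyword(lexer: Lexer, value: str) -> None:
    """Like skip_keyword, but a missing keyword is a syntax error."""
    if not skip_keyword(lexer, value):
        raise SyntaxError(f"Expected {value!r}, found {lexer.token.value!r}")


tokens = [Token(TokenKind.NAME, "on"), Token(TokenKind.NAME, "User"), Token(TokenKind.EOF, "")]
lexer = Lexer(tokens)
assert skip_keyword(lexer, "implements") is False  # no match: parser state unchanged
expect_keyword(lexer, "on")                        # consumes the "on" keyword
assert lexer.token.value == "User"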