Skip to content

Commit 523b818

Browse files
committed
Parser: Add 'skip_keyword' function
Replicates graphql/graphql-js@26c9874
1 parent 0d129ff commit 523b818

File tree

1 file changed

+37
-24
lines changed

1 file changed

+37
-24
lines changed

graphql/language/parser.py

Lines changed: 37 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -333,19 +333,16 @@ def parse_fragment(lexer: Lexer) -> Union[FragmentSpreadNode, InlineFragmentNode
333333
"""
334334
start = lexer.token
335335
expect(lexer, TokenKind.SPREAD)
336-
if peek(lexer, TokenKind.NAME) and lexer.token.value != "on":
336+
337+
has_type_condition = skip_keyword(lexer, "on")
338+
if not has_type_condition and peek(lexer, TokenKind.NAME):
337339
return FragmentSpreadNode(
338340
name=parse_fragment_name(lexer),
339341
directives=parse_directives(lexer, False),
340342
loc=loc(lexer, start),
341343
)
342-
if lexer.token.value == "on":
343-
lexer.advance()
344-
type_condition: Optional[NamedTypeNode] = parse_named_type(lexer)
345-
else:
346-
type_condition = None
347344
return InlineFragmentNode(
348-
type_condition=type_condition,
345+
type_condition=parse_named_type(lexer) if has_type_condition else None,
349346
directives=parse_directives(lexer, False),
350347
selection_set=parse_selection_set(lexer),
351348
loc=loc(lexer, start),
@@ -362,14 +359,14 @@ def parse_fragment_definition(lexer: Lexer) -> FragmentDefinitionNode:
362359
return FragmentDefinitionNode(
363360
name=parse_fragment_name(lexer),
364361
variable_definitions=parse_variable_definitions(lexer),
365-
type_condition=expect_keyword(lexer, "on") and parse_named_type(lexer),
362+
type_condition=parse_type_condition(lexer),
366363
directives=parse_directives(lexer, False),
367364
selection_set=parse_selection_set(lexer),
368365
loc=loc(lexer, start),
369366
)
370367
return FragmentDefinitionNode(
371368
name=parse_fragment_name(lexer),
372-
type_condition=expect_keyword(lexer, "on") and parse_named_type(lexer),
369+
type_condition=parse_type_condition(lexer),
373370
directives=parse_directives(lexer, False),
374371
selection_set=parse_selection_set(lexer),
375372
loc=loc(lexer, start),
@@ -389,6 +386,12 @@ def parse_fragment_name(lexer: Lexer) -> NameNode:
389386
return parse_name(lexer)
390387

391388

389+
def parse_type_condition(lexer: Lexer) -> NamedTypeNode:
    """TypeCondition: NamedType

    Consume the mandatory 'on' keyword (raising a syntax error if it is
    missing) and return the named type that follows it.
    """
    expect_keyword(lexer, "on")
    type_condition = parse_named_type(lexer)
    return type_condition
393+
394+
392395
# Implement the parsing rules in the Values section.
393396

394397

@@ -648,8 +651,7 @@ def parse_object_type_definition(lexer: Lexer) -> ObjectTypeDefinitionNode:
648651
def parse_implements_interfaces(lexer: Lexer) -> List[NamedTypeNode]:
649652
"""ImplementsInterfaces"""
650653
types: List[NamedTypeNode] = []
651-
if lexer.token.value == "implements":
652-
lexer.advance()
654+
if skip_keyword(lexer, "implements"):
653655
# optional leading ampersand
654656
skip(lexer, TokenKind.AMP)
655657
append = types.append
@@ -1052,13 +1054,13 @@ def peek(lexer: Lexer, kind: TokenKind):
10521054
def skip(lexer: Lexer, kind: TokenKind) -> bool:
    """Conditionally skip the next token.

    If the next token is of the given kind, return True after advancing the lexer.
    Otherwise, do not change the parser state and return False.
    """
    # Evaluate the match once, advance only on success, and report the result.
    matched = lexer.token.kind == kind
    if matched:
        lexer.advance()
    return matched
10621064

10631065

10641066
def expect(lexer: Lexer, kind: TokenKind) -> Token:
@@ -1076,19 +1078,30 @@ def expect(lexer: Lexer, kind: TokenKind) -> Token:
10761078
)
10771079

10781080

1079-
def expect_keyword(lexer: Lexer, value: str) -> Token:
1080-
"""Check next token for given keyword
1081+
def skip_keyword(lexer: Lexer, value: str) -> bool:
    """Conditionally skip the next keyword.

    If the next token is a keyword with the given value, return True after advancing
    the lexer. Otherwise, do not change the parser state and return False.
    """
    # A keyword is a NAME token whose value equals the requested word.
    is_matching_keyword = (
        lexer.token.kind == TokenKind.NAME and lexer.token.value == value
    )
    if is_matching_keyword:
        lexer.advance()
    return is_matching_keyword
1092+
1093+
1094+
def expect_keyword(lexer: Lexer, value: str) -> None:
    """Check next token for given keyword.

    If the next token is a keyword with the given value, advance the lexer. Otherwise,
    do not change the parser state and throw an error.
    """
    # Guard clause: the success path delegates entirely to skip_keyword.
    if skip_keyword(lexer, value):
        return
    token = lexer.token
    raise GraphQLSyntaxError(
        lexer.source, token.start, f"Expected {value!r}, found {token.desc}"
    )
10921105

10931106

10941107
def unexpected(lexer: Lexer, at_token: Token = None) -> GraphQLError:

0 commit comments

Comments
 (0)