
Commit 230ab92

Add is_punctuator_token to internal API

Replicates graphql/graphql-js@3e84962

1 parent d211b96

3 files changed, +53 -2 lines

README.md

Lines changed: 1 addition & 1 deletion

@@ -13,7 +13,7 @@ a query language for APIs created by Facebook.
 [![Code Style](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/ambv/black)
 
 The current version 1.0.2 of GraphQL-core-next is up-to-date with GraphQL.js version
-14.2.1. All parts of the API are covered by an extensive test suite of currently 1746
+14.2.1. All parts of the API are covered by an extensive test suite of currently 1748
 unit tests.

graphql/language/lexer.py

Lines changed: 25 additions & 1 deletion

@@ -6,7 +6,7 @@
 from .source import Source
 from .block_string import dedent_block_string_value
 
-__all__ = ["Lexer", "TokenKind", "Token"]
+__all__ = ["Lexer", "TokenKind", "Token", "is_punctuator_token"]
 
 
 class TokenKind(Enum):
@@ -103,6 +103,30 @@ def desc(self) -> str:
         return f"{kind} {value!r}" if value else kind
 
 
+_punctuator_tokens = frozenset(
+    [
+        TokenKind.BANG,
+        TokenKind.DOLLAR,
+        TokenKind.AMP,
+        TokenKind.PAREN_L,
+        TokenKind.PAREN_R,
+        TokenKind.SPREAD,
+        TokenKind.COLON,
+        TokenKind.EQUALS,
+        TokenKind.AT,
+        TokenKind.BRACKET_L,
+        TokenKind.BRACKET_R,
+        TokenKind.BRACE_L,
+        TokenKind.PIPE,
+        TokenKind.BRACE_R,
+    ]
+)
+
+
+def is_punctuator_token(token: Token) -> bool:
+    return token.kind in _punctuator_tokens
+
+
 def print_char(char):
     return repr(char) if char else TokenKind.EOF.value
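For orientation, the new predicate is a plain membership test of a token's kind against the frozen set added above. A minimal usage sketch, not part of this commit: it assumes Lexer(Source(...)) plus repeated lexer.advance() calls yield successive Token objects, as the imports in the test diff below suggest.

from graphql.language import Lexer, Source, TokenKind
from graphql.language.lexer import is_punctuator_token

# Walk the token stream of a small document and report which tokens are
# punctuators; the advance() loop is an assumption about how the lexer
# is normally driven, not code from this commit.
lexer = Lexer(Source("{ user(id: 4) { name } }"))
token = lexer.advance()
while token.kind != TokenKind.EOF:
    print(token.kind.name, is_punctuator_token(token))
    token = lexer.advance()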

tests/language/test_lexer.py

Lines changed: 27 additions & 0 deletions

@@ -2,6 +2,7 @@
 
 from graphql.error import GraphQLSyntaxError
 from graphql.language import Lexer, Source, SourceLocation, Token, TokenKind
+from graphql.language.lexer import is_punctuator_token
 from graphql.pyutils import dedent, inspect
 
 
@@ -368,3 +369,29 @@ def produces_double_linked_list_of_tokens_including_comments():
             TokenKind.BRACE_R,
             TokenKind.EOF,
         ]
+
+
+def describe_is_punctuator_token():
+    def returns_true_for_punctuator_tokens():
+        assert is_punctuator_token(lex_one("!")) is True
+        assert is_punctuator_token(lex_one("$")) is True
+        assert is_punctuator_token(lex_one("&")) is True
+        assert is_punctuator_token(lex_one("(")) is True
+        assert is_punctuator_token(lex_one(")")) is True
+        assert is_punctuator_token(lex_one("...")) is True
+        assert is_punctuator_token(lex_one(":")) is True
+        assert is_punctuator_token(lex_one("=")) is True
+        assert is_punctuator_token(lex_one("@")) is True
+        assert is_punctuator_token(lex_one("[")) is True
+        assert is_punctuator_token(lex_one("]")) is True
+        assert is_punctuator_token(lex_one("{")) is True
+        assert is_punctuator_token(lex_one("|")) is True
+        assert is_punctuator_token(lex_one("}")) is True
+
+    def returns_false_for_non_punctuator_tokens():
+        assert is_punctuator_token(lex_one("")) is False
+        assert is_punctuator_token(lex_one("name")) is False
+        assert is_punctuator_token(lex_one("1")) is False
+        assert is_punctuator_token(lex_one("3.14")) is False
+        assert is_punctuator_token(lex_one('"str"')) is False
+        assert is_punctuator_token(lex_one('"""str"""')) is False
