|
2 | 2 |
|
3 | 3 | from graphql.error import GraphQLSyntaxError |
4 | 4 | from graphql.language import Lexer, Source, SourceLocation, Token, TokenKind |
| 5 | +from graphql.language.lexer import is_punctuator_token |
5 | 6 | from graphql.pyutils import dedent, inspect |
6 | 7 |
|
7 | 8 |
|
@@ -368,3 +369,29 @@ def produces_double_linked_list_of_tokens_including_comments(): |
368 | 369 | TokenKind.BRACE_R, |
369 | 370 | TokenKind.EOF, |
370 | 371 | ] |
| 372 | + |
| 373 | + |
def describe_is_punctuator_token():
    """Check classification of lexed tokens as punctuators."""

    def returns_true_for_punctuator_tokens():
        # Every single- and multi-character GraphQL punctuator.
        punctuator_sources = (
            "!", "$", "&", "(", ")", "...", ":",
            "=", "@", "[", "]", "{", "|", "}",
        )
        for source in punctuator_sources:
            assert is_punctuator_token(lex_one(source)) is True

    def returns_false_for_non_punctuator_tokens():
        # EOF (from empty source), Name, Int, Float, String and BlockString
        # tokens must all be rejected.
        non_punctuator_sources = ("", "name", "1", "3.14", '"str"', '"""str"""')
        for source in non_punctuator_sources:
            assert is_punctuator_token(lex_one(source)) is False
0 commit comments