@@ -353,10 +353,10 @@ def lexes_block_strings():
             TokenKind.BLOCK_STRING, 0, 32, 1, 1, 'contains """ triple-quote'
         )
         assert lex_one('"""multi\nline"""') == Token(
-            TokenKind.BLOCK_STRING, 0, 16, 2, -8, "multi\nline"
+            TokenKind.BLOCK_STRING, 0, 16, 1, 1, "multi\nline"
         )
         assert lex_one('"""multi\rline\r\nnormalized"""') == Token(
-            TokenKind.BLOCK_STRING, 0, 28, 3, -14, "multi\nline\nnormalized"
+            TokenKind.BLOCK_STRING, 0, 28, 1, 1, "multi\nline\nnormalized"
         )
         assert lex_one('"""unescaped \\n\\r\\b\\t\\f\\u1234"""') == Token(
             TokenKind.BLOCK_STRING,
@@ -388,9 +388,7 @@ def lexes_block_strings():
         assert lex_one(
             '"""\n\n        spans\n          multiple\n'
             '            lines\n\n        """'
-        ) == Token(
-            TokenKind.BLOCK_STRING, 0, 68, 7, -56, "spans\n  multiple\n    lines"
-        )
+        ) == Token(TokenKind.BLOCK_STRING, 0, 68, 1, 1, "spans\n  multiple\n    lines")

     def advance_line_after_lexing_multiline_block_string():
         assert (
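The expectation change in these hunks is easier to follow with the position arithmetic spelled out: the old expected values place a block-string token on the line the lexer had reached after scanning the string's embedded newlines (with a negative column), while the new values pin the token to line 1, column 1, where its opening quotes sit. The sketch below is not graphql-core's lexer; it is a minimal illustration, assuming the common convention that a token's column is 1 + start - line_start, and hypothetical names for the lexer's line bookkeeping. Under that assumption, snapshotting the bookkeeping after the scan of '"""multi\nline"""' (newline at offset 8, so the next line starts at offset 9) would yield exactly the old values 2 and -8.

# Minimal sketch, not graphql-core's implementation: derive a token's
# (line, column) from a snapshot of the lexer's line bookkeeping.
# `line` and `line_start` are assumed names for "current 1-based line"
# and "source offset where that line begins".

def token_position(start: int, line: int, line_start: int) -> tuple[int, int]:
    # Conventional 1-based column of `start` on the line beginning at `line_start`.
    return line, 1 + start - line_start

# Token '"""multi\nline"""' starts at offset 0 and contains a newline at offset 8.

# Buggy order of operations: scan first, snapshot after. Scanning the embedded
# newline advances the bookkeeping to line 2, which starts at offset 9.
print(token_position(0, line=2, line_start=9))  # (2, -8)  -> old expected values

# Fixed order: snapshot the bookkeeping at the token's start, then scan.
print(token_position(0, line=1, line_start=0))  # (1, 1)   -> new expected values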