@@ -80,23 +80,23 @@ def visit_Statement(self, statement: ast.AST) -> None: # noqa: N802
8080 self .last_statement = statement
8181
8282
# Token types that carry layout/structure only, never user data.
_NON_DATA_TOKENS = {
    Token.SEPARATOR,
    Token.CONTINUATION,
    Token.EOL,
    Token.EOS,
}

# Same set, additionally treating comments as non-data.
_NON_DATA_TOKENS_WITH_COMMENT = _NON_DATA_TOKENS | {Token.COMMENT}
91+
92+
8393def _get_non_data_range_from_node (
8494 node : ast .AST , only_start : bool = False , allow_comments : bool = False
8595) -> Optional [Range ]:
96+ non_data_tokens = _NON_DATA_TOKENS_WITH_COMMENT if allow_comments else _NON_DATA_TOKENS
8697 if cached_isinstance (node , Statement ) and node .tokens :
8798 start_token = next (
88- (
89- v
90- for v in node .tokens
91- if v .type
92- not in [
93- Token .SEPARATOR ,
94- * ([] if allow_comments else [Token .COMMENT ]),
95- Token .CONTINUATION ,
96- Token .EOL ,
97- Token .EOS ,
98- ]
99- ),
99+ (v for v in node .tokens if v .type not in non_data_tokens ),
100100 None ,
101101 )
102102
@@ -106,18 +106,7 @@ def _get_non_data_range_from_node(
106106 end_tokens = node .tokens
107107
108108 end_token = next (
109- (
110- v
111- for v in reversed (end_tokens )
112- if v .type
113- not in [
114- Token .SEPARATOR ,
115- * ([] if allow_comments else [Token .COMMENT ]),
116- Token .CONTINUATION ,
117- Token .EOL ,
118- Token .EOS ,
119- ]
120- ),
109+ (v for v in reversed (end_tokens ) if v .type not in non_data_tokens ),
121110 None ,
122111 )
123112 if start_token is not None and end_token is not None :
@@ -282,35 +271,35 @@ def tokenize_variables(
282271 return _tokenize_variables (token , variables )
283272
284273
285- if get_robot_version () < ( 7 , 0 ) :
286-
287- def _tokenize_variables ( token : Token , variables : Any ) -> Iterator [ Token ]:
288- lineno = token . lineno
289- col_offset = token . col_offset
290- remaining = ""
291- for before , variable , remaining in variables :
292- if before :
293- yield Token (token . type , before , lineno , col_offset )
294- col_offset += len (before )
295- yield Token ( Token . VARIABLE , variable , lineno , col_offset )
296- col_offset += len ( variable )
297- if remaining :
298- yield Token ( token . type , remaining , lineno , col_offset )
299-
300- else :
301-
302- def _tokenize_variables ( token : Token , variables : Any ) -> Iterator [ Token ]:
303- lineno = token . lineno
304- col_offset = token . col_offset
305- after = ""
306- for match in variables :
307- if match .before :
308- yield Token ( token . type , match .before , lineno , col_offset )
309- yield Token ( Token . VARIABLE , match . match , lineno , col_offset + match . start )
310- col_offset += match . end
311- after = match . after
312- if after :
313- yield Token ( token . type , after , lineno , col_offset )
def _tokenize_variables_before7(token: Token, variables: Any) -> Iterator[Token]:
    """Split *token* into plain/VARIABLE sub-tokens for Robot Framework < 7.

    ``variables`` yields ``(before, variable, remaining)`` string triples as
    produced by the pre-7 variable matcher; column offsets are advanced per
    emitted piece so each sub-token keeps its source position.
    """
    line = token.lineno
    col = token.col_offset
    tail = ""
    for plain, var_text, tail in variables:
        if plain:
            yield Token(token.type, plain, line, col)
            col += len(plain)
        yield Token(Token.VARIABLE, var_text, line, col)
        col += len(var_text)
    # Whatever follows the last variable match keeps the original token type.
    if tail:
        yield Token(token.type, tail, line, col)
286+
287+
def _tokenize_variables_v7(token: Token, variables: Any) -> Iterator[Token]:
    """Split *token* into plain/VARIABLE sub-tokens for Robot Framework >= 7.

    ``variables`` yields match objects exposing ``before``/``match``/``after``
    text plus ``start``/``end`` offsets; sub-token positions are derived from
    those offsets relative to the running column offset.
    """
    line = token.lineno
    col = token.col_offset
    trailing = ""
    for m in variables:
        prefix = m.before
        if prefix:
            yield Token(token.type, prefix, line, col)
        yield Token(Token.VARIABLE, m.match, line, col + m.start)
        col += m.end
        trailing = m.after
    # Text after the final match keeps the original token type.
    if trailing:
        yield Token(token.type, trailing, line, col)
300+
301+
302+ _tokenize_variables = _tokenize_variables_before7 if get_robot_version () < ( 7 , 0 ) else _tokenize_variables_v7
314303
315304
316305def iter_over_keyword_names_and_owners (
0 commit comments