@@ -248,7 +248,8 @@ def mapping(cls) -> Dict[str, Tuple[Enum, Optional[Set[Enum]]]]:
         return cls.__mapping

     ESCAPE_REGEX = re.compile(
-        r"(?P<t>[^\\]+)|(?P<x>\\([^xuU]|x[0-0a-f]{2}|u[0-9a-f]{4}|U[0-9a-f]{8}){0,1})", re.MULTILINE | re.DOTALL
+        r"(?P<t>[^\\]+)|(?P<x>\\(?:[\\nrt]|x[0-9A-Fa-f]{2}|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}))|(?P<e>\\(?:[^\\nrt\\xuU]|[\\xuU][^0-9a-fA-F]))",
+        re.MULTILINE | re.DOTALL,
     )
     BDD_TOKEN_REGEX = re.compile(r"^(Given|When|Then|And|But)\s", flags=re.IGNORECASE)

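For illustration only, a minimal standalone sketch (plain `re`, outside robotcode) of what the new pattern matches, with the pattern reproduced verbatim from the `+` line above: plain text lands in group `t`, escape sequences that will get the ESCAPE token type land in group `x`, and other backslash sequences land in the new group `e`:

```python
import re

# Pattern copied verbatim from the commit above, split across two raw strings
# only for line length.
ESCAPE_REGEX = re.compile(
    r"(?P<t>[^\\]+)|(?P<x>\\(?:[\\nrt]|x[0-9A-Fa-f]{2}|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}))"
    r"|(?P<e>\\(?:[^\\nrt\\xuU]|[\\xuU][^0-9a-fA-F]))",
    re.MULTILINE | re.DOTALL,
)

for m in ESCAPE_REGEX.finditer(r"hello\nworld\x41\q"):
    print(m.lastgroup, m.group())
# t hello
# x \n
# t world
# x \x41
# e \q
```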
@@ -324,7 +325,7 @@ async def generate_sem_sub_tokens(
                 for g in cls.ESCAPE_REGEX.finditer(token.value):
                     yield SemTokenInfo.from_token(
                         token,
-                        sem_type if g.group("x") is None or g.end() - g.start() == 1 else RobotSemTokenTypes.ESCAPE,
+                        sem_type if g.group("x") is None else RobotSemTokenTypes.ESCAPE,
                         sem_mod,
                         col_offset + g.start(),
                         g.end() - g.start(),
@@ -433,7 +434,20 @@ async def generate_sem_sub_tokens(
             else:
                 yield SemTokenInfo.from_token(token, sem_type, sem_mod, col_offset + kw_index, len(kw))
         elif token.type == RobotToken.NAME and isinstance(node, (LibraryImport, ResourceImport, VariablesImport)):
-            yield SemTokenInfo.from_token(token, RobotSemTokenTypes.NAMESPACE, sem_mod, col_offset, length)
+            if "\\" in token.value:
+                if col_offset is None:
+                    col_offset = token.col_offset
+
+                for g in cls.ESCAPE_REGEX.finditer(token.value):
+                    yield SemTokenInfo.from_token(
+                        token,
+                        RobotSemTokenTypes.NAMESPACE if g.group("x") is None else RobotSemTokenTypes.ESCAPE,
+                        sem_mod,
+                        col_offset + g.start(),
+                        g.end() - g.start(),
+                    )
+            else:
+                yield SemTokenInfo.from_token(token, RobotSemTokenTypes.NAMESPACE, sem_mod, col_offset, length)
         elif get_robot_version() >= (5, 0) and token.type == RobotToken.OPTION:
             from robot.parsing.model.statements import ExceptHeader, WhileHeader

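As a rough stand-in for the new `NAME` branch (the helper name `name_sub_tokens` and the plain-tuple output are made up for this sketch; the real code yields `SemTokenInfo` objects), this shows how an import name containing an escape sequence is carved into NAMESPACE and ESCAPE spans:

```python
import re

# Same pattern as in the sketch above.
ESCAPE_REGEX = re.compile(
    r"(?P<t>[^\\]+)|(?P<x>\\(?:[\\nrt]|x[0-9A-Fa-f]{2}|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}))"
    r"|(?P<e>\\(?:[^\\nrt\\xuU]|[\\xuU][^0-9a-fA-F]))",
    re.MULTILINE | re.DOTALL,
)

def name_sub_tokens(value: str, col_offset: int = 0):
    # Mirrors the new elif branch: only matches of the "x" group are re-typed
    # as ESCAPE; every other span keeps the NAMESPACE type.
    for g in ESCAPE_REGEX.finditer(value):
        sem_type = "namespace" if g.group("x") is None else "escape"
        yield sem_type, col_offset + g.start(), g.end() - g.start()

print(list(name_sub_tokens(r"MyLibrary\x2FUtils")))
# [('namespace', 0, 9), ('escape', 9, 4), ('namespace', 13, 5)]
```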
@@ -479,10 +493,14 @@ async def generate_sem_tokens(
         builtin_library_doc: Optional[LibraryDoc],
     ) -> AsyncIterator[SemTokenInfo]:
         from robot.parsing.lexer.tokens import Token as RobotToken
-        from robot.parsing.model.statements import Arguments, Variable
+        from robot.parsing.model.statements import Arguments, LibraryImport, ResourceImport, Variable, VariablesImport
         from robot.utils.escaping import split_from_equals

-        if token.type in {RobotToken.ARGUMENT, RobotToken.TESTCASE_NAME, RobotToken.KEYWORD_NAME}:
+        if (
+            token.type in {RobotToken.ARGUMENT, RobotToken.TESTCASE_NAME, RobotToken.KEYWORD_NAME}
+            or token.type == RobotToken.NAME
+            and isinstance(node, (VariablesImport, LibraryImport, ResourceImport))
+        ):
             if (
                 isinstance(node, Variable) and token.type == RobotToken.ARGUMENT and node.name and node.name[0] == "&"
             ) or (isinstance(node, Arguments)):