@@ -82,6 +82,7 @@ class RobotSemTokenTypes(Enum):
     KEYWORD_NAME = "keywordName"
     CONTROL_FLOW = "controlFlow"
     ARGUMENT = "argument"
+    EMBEDDED_ARGUMENT = "embeddedArgument"
     VARIABLE = "variable"
     KEYWORD = "keywordCall"
     KEYWORD_INNER = "keywordCallInner"
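A new custom token type like `embeddedArgument` only reaches clients if its string value is part of the semantic-tokens legend the server advertises, since encoded tokens refer to types by their index in that legend. A minimal, generic sketch of that relationship (standard LSP plumbing, not robotcode's actual registration code; `token_type_legend` is an illustrative name):

```python
from enum import Enum

class RobotSemTokenTypes(Enum):
    KEYWORD_NAME = "keywordName"
    EMBEDDED_ARGUMENT = "embeddedArgument"  # new custom token type from this commit
    VARIABLE = "variable"

# Encoded semantic tokens carry an index, not a name, so the legend's
# order must stay stable for the lifetime of the server session.
token_type_legend = [member.value for member in RobotSemTokenTypes]
assert token_type_legend.index("embeddedArgument") == 1
```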
@@ -400,18 +401,48 @@ async def generate_sem_sub_tokens(
                      1,
                      SemanticTokenTypes.OPERATOR,
                  )
-            # if builtin_library_doc is not None and KeywordMatcher(kw) in builtin_library_doc.keywords:
-            #     doc = await namespace.find_keyword(token.value)
-            #     if (
-            #         doc is not None
-            #         and doc.libname == cls.BUILTIN_MATCHER
-            #         and KeywordMatcher(doc.name) == KeywordMatcher(kw)
-            #     ):
-            #         if not sem_mod:
-            #             sem_mod = set()
-            #         sem_mod.add(RobotSemTokenModifiers.BUILTIN)
-
-            yield SemTokenInfo.from_token(token, sem_type, sem_mod, col_offset + kw_index, len(kw))
+
+            if builtin_library_doc is not None and kw in builtin_library_doc.keywords:
+                doc = await namespace.find_keyword(token.value)
+                if (
+                    doc is not None
+                    and doc.libname == cls.BUILTIN_MATCHER
+                    and KeywordMatcher(doc.name) == KeywordMatcher(kw)
+                ):
+                    if not sem_mod:
+                        sem_mod = set()
+                    sem_mod.add(RobotSemTokenModifiers.BUILTIN)
+
+            kw_doc = await namespace.find_keyword(token.value, raise_keyword_error=False)
+            if kw_doc is not None and kw_doc.is_embedded:
+                if get_robot_version() >= (6, 0):
+                    m = kw_doc.matcher.embedded_arguments.match(kw)
+                else:
+                    m = kw_doc.matcher.embedded_arguments.name.match(kw)
+
+                if m and m.lastindex is not None:
+                    start, end = m.span(0)
+                    for i in range(1, m.lastindex + 1):
+                        arg_start, arg_end = m.span(i)
+                        yield SemTokenInfo.from_token(
+                            token, sem_type, sem_mod, col_offset + kw_index + start, arg_start - start
+                        )
+                        yield SemTokenInfo.from_token(
+                            token,
+                            RobotSemTokenTypes.EMBEDDED_ARGUMENT,
+                            sem_mod,
+                            col_offset + kw_index + arg_start,
+                            arg_end - arg_start,
+                        )
+                        start = arg_end + 1
+
+                    if start < end:
+                        yield SemTokenInfo.from_token(
+                            token, sem_type, sem_mod, col_offset + kw_index + start, end - start
+                        )
+
+            else:
+                yield SemTokenInfo.from_token(token, sem_type, sem_mod, col_offset + kw_index, len(kw))
         elif token.type == RobotToken.NAME and isinstance(node, (LibraryImport, ResourceImport, VariablesImport)):
             yield SemTokenInfo.from_token(token, RobotSemTokenTypes.NAMESPACE, sem_mod, col_offset, length)
         elif get_robot_version() >= (5, 0) and token.type == RobotToken.OPTION:
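The core of the hunk above is the span walk over the embedded-arguments regex match: text between capture groups keeps the keyword token type, while each group itself is emitted as `embeddedArgument`. Below is a simplified, standalone sketch of that splitting technique, using a plain `re` pattern as a stand-in for Robot Framework's `embedded_arguments` matcher; the names (`split_keyword_call`, `EMBEDDED`) are illustrative, and the trailing-text handling is slightly simplified compared to the diff.

```python
import re

# Stand-in for the regex Robot Framework derives from an embedded-arguments
# keyword name such as "Add ${quantity} copies of ${item}" (hypothetical example).
EMBEDDED = re.compile(r"Add (.*?) copies of (.*?)$", re.IGNORECASE)

def split_keyword_call(call_text: str):
    """Yield (token_type, start, length) spans for one keyword call."""
    m = EMBEDDED.match(call_text)
    if m is None or m.lastindex is None:
        # No embedded arguments matched: the whole text is one keyword-call token.
        yield ("keywordCall", 0, len(call_text))
        return
    start, end = m.span(0)
    for i in range(1, m.lastindex + 1):
        arg_start, arg_end = m.span(i)
        if arg_start > start:
            # Literal keyword text before this embedded argument.
            yield ("keywordCall", start, arg_start - start)
        # The embedded argument itself gets its own token type.
        yield ("embeddedArgument", arg_start, arg_end - arg_start)
        start = arg_end
    if start < end:
        # Literal keyword text after the last embedded argument.
        yield ("keywordCall", start, end - start)

print(list(split_keyword_call("Add 5 copies of coffee")))
# [('keywordCall', 0, 4), ('embeddedArgument', 4, 1),
#  ('keywordCall', 5, 11), ('embeddedArgument', 16, 6)]
```

The output is exactly the shape a semantic-token consumer needs: alternating `(type, start, length)` segments covering the keyword name, so the editor can color literal keyword text and embedded argument values differently.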
@@ -462,13 +493,12 @@ async def generate_sem_tokens(
     ) -> AsyncGenerator[SemTokenInfo, None]:
         from robot.parsing.lexer.tokens import Token as RobotToken

-        if token.type in {*RobotToken.ALLOW_VARIABLES, RobotToken.KEYWORD, ROBOT_KEYWORD_INNER}:
+        if token.type in {*RobotToken.ALLOW_VARIABLES}:

             for sub_token in self._tokenize_variables(
                 token,
                 ignore_errors=True,
                 identifiers="$" if token.type == RobotToken.KEYWORD_NAME else "$@&%",
-                extra_types={ROBOT_KEYWORD_INNER},
             ):
                 async for e in self.generate_sem_sub_tokens(
                     namespace, builtin_library_doc, libraries_matchers, resources_matchers, sub_token, node