@@ -152,20 +152,25 @@ class RobotSemTokenModifiers(Enum):
     EMBEDDED = "embedded"


+# Type aliases for better type hints - extend base types to be compatible with LSP framework
+AnyTokenType = Union[RobotSemTokenTypes, SemanticTokenTypes, Enum]
+AnyTokenModifier = Union[RobotSemTokenModifiers, SemanticTokenModifiers, Enum]
+
+
 @dataclass
 class SemTokenInfo:
     lineno: int
     col_offset: int
     length: int
-    sem_token_type: Enum
-    sem_modifiers: Optional[Set[Enum]] = None
+    sem_token_type: AnyTokenType
+    sem_modifiers: Optional[Set[AnyTokenModifier]] = None

     @classmethod
     def from_token(
         cls,
         token: Token,
-        sem_token_type: Enum,
-        sem_modifiers: Optional[Set[Enum]] = None,
+        sem_token_type: AnyTokenType,
+        sem_modifiers: Optional[Set[AnyTokenModifier]] = None,
         col_offset: Optional[int] = None,
         length: Optional[int] = None,
     ) -> "SemTokenInfo":
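
Taken on their own, the new aliases are plain `Union` types, so members of either enum family (or any other `Enum`) are accepted wherever `AnyTokenType` is expected. A minimal, self-contained sketch of the same pattern, with stand-in enums in place of the real `RobotSemTokenTypes`/`SemanticTokenTypes` classes from the diff:

```python
from dataclasses import dataclass
from enum import Enum
from typing import Optional, Set, Union


class RobotSemTokenTypes(Enum):  # stand-in for the Robot-specific enum
    HEADER = "header"
    ARGUMENT = "argument"


class SemanticTokenTypes(Enum):  # stand-in for the LSP-standard enum
    PARAMETER = "parameter"
    OPERATOR = "operator"


# Same shape as the alias introduced in the diff.
AnyTokenType = Union[RobotSemTokenTypes, SemanticTokenTypes, Enum]


@dataclass
class SemTokenInfo:
    lineno: int
    col_offset: int
    length: int
    sem_token_type: AnyTokenType
    sem_modifiers: Optional[Set[Enum]] = None


# Members of either enum family satisfy AnyTokenType, so no casts are needed.
robot_specific = SemTokenInfo(1, 0, 6, RobotSemTokenTypes.HEADER)
lsp_standard = SemTokenInfo(2, 4, 8, SemanticTokenTypes.PARAMETER)
print(robot_specific.sem_token_type, lsp_standard.sem_token_type)
```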
@@ -187,7 +192,7 @@ class SemanticTokenMapper:
     - Managing builtin keyword matching
     """

-    _mapping: ClassVar[Optional[Dict[str, Tuple[Enum, Optional[Set[Enum]]]]]] = None
+    _mapping: ClassVar[Optional[Dict[str, Tuple[AnyTokenType, Optional[Set[AnyTokenModifier]]]]]] = None

     ESCAPE_REGEX: ClassVar[Pattern[str]] = re.compile(
         r"(?P<t>[^\\]+)|(?P<x>\\(?:[\\nrt]|x[0-9A-Fa-f]{2}|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}))|(?P<e>\\(?:[^\\nrt\\xuU]|[\\xuU][^0-9a-fA-F]))",
@@ -197,13 +202,13 @@ class SemanticTokenMapper:
     BUILTIN_MATCHER: ClassVar[KeywordMatcher] = KeywordMatcher("BuiltIn", is_namespace=True)

     @classmethod
-    def generate_mapping(cls) -> Dict[str, Tuple[Enum, Optional[Set[Enum]]]]:
+    def generate_mapping(cls) -> Dict[str, Tuple[AnyTokenType, Optional[Set[AnyTokenModifier]]]]:
         """Generate semantic token mappings for different Robot Framework versions.

         Returns:
             Dict mapping token types to semantic token information
         """
-        definition: Dict[FrozenSet[str], Tuple[Enum, Optional[Set[Enum]]]] = {
+        definition: Dict[FrozenSet[str], Tuple[AnyTokenType, Optional[Set[AnyTokenModifier]]]] = {
             frozenset(Token.HEADER_TOKENS): (RobotSemTokenTypes.HEADER, None),
             frozenset({Token.SETTING_HEADER}): (
                 RobotSemTokenTypes.HEADER_SETTINGS,
@@ -348,15 +353,15 @@ def generate_mapping(cls) -> Dict[str, Tuple[Enum, Optional[Set[Enum]]]]:
                 }
             )

-        result: Dict[str, Tuple[Enum, Optional[Set[Enum]]]] = {}
+        result: Dict[str, Tuple[AnyTokenType, Optional[Set[AnyTokenModifier]]]] = {}
         for k, v in definition.items():
             for e in k:
                 result[e] = v

         return result

     @classmethod
-    def mapping(cls) -> Dict[str, Tuple[Enum, Optional[Set[Enum]]]]:
+    def mapping(cls) -> Dict[str, Tuple[AnyTokenType, Optional[Set[AnyTokenModifier]]]]:
         """Get cached token type mappings.

         Returns:
@@ -366,7 +371,9 @@ def mapping(cls) -> Dict[str, Tuple[Enum, Optional[Set[Enum]]]]:
             cls._mapping = cls.generate_mapping()
         return cls._mapping

-    def get_semantic_info(self, token_type: Optional[str]) -> Optional[Tuple[Enum, Optional[Set[Enum]]]]:
+    def get_semantic_info(
+        self, token_type: Optional[str]
+    ) -> Optional[Tuple[AnyTokenType, Optional[Set[AnyTokenModifier]]]]:
         """Get semantic token information for a given token type.

         Args:
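
These hunks only widen the signatures; the caching itself is visible in the unchanged context (`cls._mapping = cls.generate_mapping()`). Below is a rough sketch of that class-level cache plus the lookup `get_semantic_info()` presumably performs. The toy table and the body of `get_semantic_info` are assumptions, since the diff shows only the signature:

```python
from enum import Enum
from typing import ClassVar, Dict, Optional, Set, Tuple


class TokenTypes(Enum):  # stand-in for RobotSemTokenTypes / SemanticTokenTypes
    HEADER = "header"
    KEYWORD_CALL = "keywordCall"


class TokenMapper:
    # The cache lives on the class, so the mapping is generated once per process
    # and shared by all instances (same idea as SemanticTokenMapper._mapping).
    _mapping: ClassVar[Optional[Dict[str, Tuple[Enum, Optional[Set[Enum]]]]]] = None

    @classmethod
    def generate_mapping(cls) -> Dict[str, Tuple[Enum, Optional[Set[Enum]]]]:
        # Toy table; the real generate_mapping() derives entries from Token.*
        # groups and adjusts them per Robot Framework version.
        return {
            "SETTING HEADER": (TokenTypes.HEADER, None),
            "KEYWORD": (TokenTypes.KEYWORD_CALL, None),
        }

    @classmethod
    def mapping(cls) -> Dict[str, Tuple[Enum, Optional[Set[Enum]]]]:
        if cls._mapping is None:
            cls._mapping = cls.generate_mapping()
        return cls._mapping

    def get_semantic_info(self, token_type: Optional[str]) -> Optional[Tuple[Enum, Optional[Set[Enum]]]]:
        # Assumed lookup: return the cached entry for the token type, or None.
        if token_type is None:
            return None
        return self.mapping().get(token_type)


mapper = TokenMapper()
print(mapper.get_semantic_info("KEYWORD"))  # (<TokenTypes.KEYWORD_CALL: 'keywordCall'>, None)
print(mapper.get_semantic_info("UNKNOWN"))  # None
```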
@@ -632,68 +639,6 @@ def process_token_for_named_argument(
         else:
             yield token, node

-    def process_semantic_token_for_named_argument(self, token: Token, node: ast.AST) -> Iterator[SemTokenInfo]:
-        """Process a token for named argument in semantic token context.
-
-        Args:
-            token: The token to process
-            node: The AST node
-
-        Yields:
-            SemTokenInfo: Semantic token information
-        """
-        name, value = split_from_equals(token.value)
-        if value is not None:
-            length = len(name)
-
-            is_variable_node = hasattr(node, "name") and hasattr(node, "value")
-
-            yield SemTokenInfo.from_token(
-                Token(
-                    ROBOT_NAMED_ARGUMENT if is_variable_node else SemanticTokenTypes.PARAMETER,
-                    name,
-                    token.lineno,
-                    token.col_offset,
-                ),
-                (RobotSemTokenTypes.NAMED_ARGUMENT if is_variable_node else SemanticTokenTypes.PARAMETER),
-            )
-            yield SemTokenInfo.from_token(
-                Token(
-                    ROBOT_OPERATOR,
-                    "=",
-                    token.lineno,
-                    token.col_offset + length,
-                ),
-                SemanticTokenTypes.OPERATOR,
-            )
-            modified_token = Token(
-                token.type,
-                value,
-                token.lineno,
-                token.col_offset + length + 1,
-                token.error,
-            )
-            yield SemTokenInfo.from_token(modified_token, RobotSemTokenTypes.ARGUMENT)
-
-        elif hasattr(node, "assign") and name:
-            yield SemTokenInfo.from_token(
-                Token(
-                    ROBOT_NAMED_ARGUMENT,
-                    name,
-                    token.lineno,
-                    token.col_offset,
-                ),
-                RobotSemTokenTypes.NAMED_ARGUMENT,
-            )
-            modified_token = Token(
-                token.type,
-                "",
-                token.lineno,
-                token.col_offset + len(name),
-                token.error,
-            )
-            yield SemTokenInfo.from_token(modified_token, RobotSemTokenTypes.ARGUMENT)
-

 class KeywordTokenAnalyzer:
     """Specialized analysis for keyword tokens and run keywords.
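
For readers reviewing the deletion: the removed `process_semantic_token_for_named_argument` split a `name=value` cell into three semantic tokens (name, `=` operator, value) using simple column arithmetic. A standalone sketch of that arithmetic; `split_from_equals` here is a simplified stand-in (the real helper also accounts for escaped `=` characters), and `named_argument_spans` is a hypothetical illustration, not part of the codebase:

```python
from typing import List, Optional, Tuple


def split_from_equals(value: str) -> Tuple[str, Optional[str]]:
    # Simplified stand-in: split on the first "=", returning (value, None)
    # when there is no "=" at all.
    name, sep, rest = value.partition("=")
    return (name, rest) if sep else (value, None)


def named_argument_spans(cell: str, col_offset: int) -> List[Tuple[str, int, int]]:
    """Return (text, start_col, length) spans for a `name=value` cell."""
    name, value = split_from_equals(cell)
    if value is None:
        return [(cell, col_offset, len(cell))]
    return [
        (name, col_offset, len(name)),                    # named-argument name
        ("=", col_offset + len(name), 1),                 # operator
        (value, col_offset + len(name) + 1, len(value)),  # argument value
    ]


print(named_argument_spans("timeout=10s", 10))
# [('timeout', 10, 7), ('=', 17, 1), ('10s', 18, 3)]
```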