@@ -1,5 +1,6 @@
 from __future__ import annotations

+import ast
 import asyncio
 import itertools
 import operator
@@ -12,12 +13,12 @@
     Dict,
     FrozenSet,
     Generator,
-    Iterable,
     NamedTuple,
     Optional,
     Set,
     Tuple,
     Union,
+    cast,
 )

 from ....utils.async_event import CancelationToken
@@ -33,7 +34,7 @@
     SemanticTokensPartialResult,
     SemanticTokenTypes,
 )
-from ..utils.ast import Token, token_in_range
+from ..utils.ast import HasTokens, Token, iter_nodes, token_in_range

 if TYPE_CHECKING:
     from ..protocol import RobotLanguageServerProtocol
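
The import changes above move this provider from a flat token stream to a walk over the parsed Robot Framework model: iter_nodes traverses the model, and HasTokens marks nodes that expose a tokens attribute. As a rough mental model only (the real helper lives in ..utils.ast and may differ in detail), iter_nodes can be thought of as a depth-first walk over the model, whose node classes derive from ast.AST:

import ast
from typing import Iterator

def iter_nodes_sketch(model: ast.AST) -> Iterator[ast.AST]:
    # Illustrative depth-first walk over a Robot Framework parsing model;
    # its node classes subclass ast.AST, so ast.iter_child_nodes works on them.
    for child in ast.iter_child_nodes(model):
        yield child
        yield from iter_nodes_sketch(child)
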
@@ -171,26 +172,40 @@ def mapping(cls) -> Dict[str, Tuple[Enum, Optional[Set[Enum]]]]:

     @classmethod
     def generate_sem_sub_tokens(
-        cls, token: Token, col_offset: Optional[int] = None, length: Optional[int] = None
+        cls, token: Token, node: ast.AST, col_offset: Optional[int] = None, length: Optional[int] = None
     ) -> Generator[SemTokenInfo, None, None]:
         from robot.parsing.lexer.tokens import Token as RobotToken
+        from robot.parsing.model.statements import (
+            Documentation,
+            Fixture,
+            LibraryImport,
+            Metadata,
+            ResourceImport,
+            VariablesImport,
+        )
         from robot.variables.search import is_variable

         sem_info = cls.mapping().get(token.type, None) if token.type is not None else None
-
         if sem_info is not None:
+            sem_type, sem_mod = sem_info
+
+            if isinstance(node, (Documentation, Metadata)):
+                sem_mod = {SemanticTokenModifiers.DOCUMENTATION}
+
             if token.type == RobotToken.VARIABLE:
                 if col_offset is None:
                     col_offset = token.col_offset
                 if length is None:
                     length = token.end_col_offset - token.col_offset

                 if is_variable(token.value):
-                    yield SemTokenInfo(token.lineno, col_offset, 2, RobotSemTokenTypes.VARIABLE_BEGIN)
-                    yield SemTokenInfo.from_token(token, sem_info[0], sem_info[1], col_offset + 2, length - 3)
-                    yield SemTokenInfo(token.lineno, col_offset + length - 1, 1, RobotSemTokenTypes.VARIABLE_END)
+                    yield SemTokenInfo(token.lineno, col_offset, 2, RobotSemTokenTypes.VARIABLE_BEGIN, sem_mod)
+                    yield SemTokenInfo.from_token(token, sem_type, sem_mod, col_offset + 2, length - 3)
+                    yield SemTokenInfo(
+                        token.lineno, col_offset + length - 1, 1, RobotSemTokenTypes.VARIABLE_END, sem_mod
+                    )
                 else:
-                    yield SemTokenInfo.from_token(token, sem_info[0], sem_info[1])
+                    yield SemTokenInfo.from_token(token, sem_type, sem_mod)

             elif token.type == RobotToken.ARGUMENT and "\\" in token.value:
                 if col_offset is None:
@@ -206,7 +221,7 @@ def generate_sem_sub_tokens(
                         col_offset + g.start(),
                         g.end() - g.start(),
                     )
-            elif token.type == RobotToken.KEYWORD:
+            elif token.type == RobotToken.KEYWORD or (token.type == RobotToken.NAME and isinstance(node, Fixture)):
                 if col_offset is None:
                     col_offset = token.col_offset
                 if length is None:
@@ -225,7 +240,7 @@ def generate_sem_sub_tokens(
                             if token.value[:index].casefold() == "BuiltIn".casefold()
                             else None,
                         )
-                    yield SemTokenInfo(token.lineno, col_offset + index, 1, RobotSemTokenTypes.SEPARATOR)
+                    yield SemTokenInfo(token.lineno, col_offset + index, 1, RobotSemTokenTypes.SEPARATOR, sem_mod)

                     new_index = token.value.find(".", index + 1)
                     if new_index >= 0:
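
The loop whose tail appears above walks dotted keyword names such as BuiltIn.Log, emitting a namespace token for the prefix before a dot and a one-character separator token for the dot itself, with the remainder colored as the keyword. A standalone sketch of that span arithmetic (illustrative only, zero-based columns, not the project's helper):

from typing import Iterator, Tuple

def split_dotted_name(value: str, col_offset: int = 0) -> Iterator[Tuple[str, int, int]]:
    # Yields (kind, start_col, length) spans, e.g. for "BuiltIn.Log":
    # ("namespace", 0, 7), ("separator", 7, 1), ("name", 8, 3)
    index = value.find(".")
    last = 0
    while index >= 0:
        yield "namespace", col_offset + last, index - last
        yield "separator", col_offset + index, 1
        last = index + 1
        index = value.find(".", last)
    yield "name", col_offset + last, len(value) - last
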
@@ -234,58 +249,67 @@ def generate_sem_sub_tokens(
                     else:
                         break

-                yield SemTokenInfo.from_token(
-                    token, sem_info[0], sem_info[1], col_offset + index + 1, length - index - 1
-                )
+                yield SemTokenInfo.from_token(token, sem_type, sem_mod, col_offset + index + 1, length - index - 1)
+            elif token.type == RobotToken.NAME and isinstance(node, (LibraryImport, ResourceImport, VariablesImport)):
+                yield SemTokenInfo.from_token(token, RobotSemTokenTypes.NAMESPACE, sem_mod, col_offset, length)
             else:
-                yield SemTokenInfo.from_token(token, sem_info[0], sem_info[1], col_offset, length)
+                yield SemTokenInfo.from_token(token, sem_type, sem_mod, col_offset, length)

     @classmethod
-    def generate_sem_tokens(cls, token: Token) -> Generator[SemTokenInfo, None, None]:
+    def generate_sem_tokens(cls, token: Token, node: ast.AST) -> Generator[SemTokenInfo, None, None]:
         from robot.parsing.lexer.tokens import Token as RobotToken

         if token.type in RobotToken.ALLOW_VARIABLES:
             last_sub_token = token
             try:
                 for sub_token in token.tokenize_variables():
                     last_sub_token = sub_token
-                    for e in cls.generate_sem_sub_tokens(sub_token):
+                    for e in cls.generate_sem_sub_tokens(sub_token, node):
                         yield e
             except BaseException:
                 pass
             if last_sub_token == token:
-                for e in cls.generate_sem_sub_tokens(last_sub_token):
+                for e in cls.generate_sem_sub_tokens(last_sub_token, node):
                     yield e
             elif last_sub_token is not None and last_sub_token.end_col_offset < token.end_col_offset:
                 for e in cls.generate_sem_sub_tokens(
                     token,
+                    node,
                     last_sub_token.end_col_offset,
                     token.end_col_offset - last_sub_token.end_col_offset - last_sub_token.col_offset,
                 ):
                     yield e

         else:
-            for e in cls.generate_sem_sub_tokens(token):
+            for e in cls.generate_sem_sub_tokens(token, node):
                 yield e

     def collect(
-        self, tokens: Iterable[Token], range: Optional[Range], cancel_token: CancelationToken
+        self, model: ast.AST, range: Optional[Range], cancel_token: CancelationToken
     ) -> Union[SemanticTokens, SemanticTokensPartialResult, None]:

         data = []
         last_line = 0
         last_col = 0

-        for robot_token in itertools.takewhile(
-            lambda t: not cancel_token.throw_if_canceled() and (range is None or token_in_range(t, range)),
+        def get_tokens() -> Generator[Tuple[Token, ast.AST], None, None]:
+            for node in iter_nodes(model):
+                if isinstance(node, HasTokens):
+                    for token in cast(HasTokens, node).tokens:
+                        yield token, node
+
+        for robot_token, robot_node in itertools.takewhile(
+            lambda t: not cancel_token.throw_if_canceled() and (range is None or token_in_range(t[0], range)),
             itertools.dropwhile(
-                lambda t: not cancel_token.throw_if_canceled() and range is not None and not token_in_range(t, range),
-                tokens,
+                lambda t: not cancel_token.throw_if_canceled()
+                and range is not None
+                and not token_in_range(t[0], range),
+                get_tokens(),
             ),
         ):
             cancel_token.throw_if_canceled()

-            for token in self.generate_sem_tokens(robot_token):
+            for token in self.generate_sem_tokens(robot_token, robot_node):
                 current_line = token.lineno - 1

                 data.append(current_line - last_line)
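
The data list being filled here targets the LSP semantic tokens wire format: a flat array of five integers per token, namely delta line, delta start character (relative to the previous token when it sits on the same line), length, token type index, and a modifier bit set. A minimal, self-contained encoder showing just the delta scheme (the helper name and sample tuples are illustrative, not part of this module):

from typing import List, Sequence, Tuple

def encode_semantic_tokens(tokens: Sequence[Tuple[int, int, int, int, int]]) -> List[int]:
    # Each input tuple is (line, col, length, token_type_index, modifier_bitset),
    # with zero-based line/col and tokens sorted by document position.
    data: List[int] = []
    last_line = 0
    last_col = 0
    for line, col, length, token_type, modifiers in tokens:
        delta_line = line - last_line
        delta_col = col - last_col if delta_line == 0 else col
        data.extend([delta_line, delta_col, length, token_type, modifiers])
        last_line, last_col = line, col
    return data

# Two tokens on line 0 and one on line 2:
# encode_semantic_tokens([(0, 0, 7, 1, 0), (0, 8, 4, 2, 0), (2, 4, 3, 1, 0)])
# -> [0, 0, 7, 1, 0, 0, 8, 4, 2, 0, 2, 4, 3, 1, 0]
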
@@ -321,7 +345,7 @@ async def collect_threading(
         try:
             cancel_token = CancelationToken()
             return await asyncio.get_event_loop().run_in_executor(
-                None, self.collect, await self.parent.documents_cache.get_tokens(document), range, cancel_token
+                None, self.collect, await self.parent.documents_cache.get_model(document), range, cancel_token
             )
         except BaseException:
             cancel_token.cancel()
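
collect_threading, visible only in part in this hunk, runs the CPU-bound collect call on a thread pool and, if the request fails or is dropped, flips the cancel token so the worker can bail out at its next throw_if_canceled poll. The general shape of that pattern, sketched with a hypothetical CancellationFlag standing in for this repo's CancelationToken:

import asyncio

class CancellationFlag:
    # Hypothetical stand-in for CancelationToken; the worker polls it cooperatively.
    def __init__(self) -> None:
        self._canceled = False

    def cancel(self) -> None:
        self._canceled = True

    def throw_if_canceled(self) -> bool:
        if self._canceled:
            raise asyncio.CancelledError()
        return False

async def run_in_thread(func, *args, flag: CancellationFlag):
    loop = asyncio.get_event_loop()
    try:
        # The worker receives the flag as its last argument and polls it.
        return await loop.run_in_executor(None, func, *args, flag)
    except BaseException:
        flag.cancel()  # let the still-running worker thread stop at its next poll
        raise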