
Commit f799fb4

refactor: some code cleanup and simplifications
1 parent 37981fa commit f799fb4
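
Most of the diff below follows two recurring patterns: function-local robot.* imports are hoisted to module level, and the AST visitor methods are annotated with the concrete Robot Framework node types instead of taking ast.AST and calling cast(). As a rough illustration of the resulting style, here is a minimal, self-contained visitor in the same spirit (VariableNameCollector and example.robot are made up for illustration; the real visitors live in namespace.py and use the project's own async Visitor base class, not the public ModelVisitor):

# Illustrative only: a small visitor written in the style this commit moves to,
# with module-level robot.* imports, typed visit_* parameters, and no cast().
from typing import List

from robot.api import get_model
from robot.api.parsing import ModelVisitor
from robot.parsing.lexer.tokens import Token
from robot.parsing.model.statements import Variable


class VariableNameCollector(ModelVisitor):
    """Collects the variable names defined in *** Variables *** sections."""

    def __init__(self) -> None:
        self.names: List[str] = []

    # The parameter is annotated as Variable directly, so no cast() is needed
    # and Token comes from the module-level import.
    def visit_Variable(self, node: Variable) -> None:  # noqa: N802
        name_token = node.get_token(Token.VARIABLE)
        if name_token is not None and name_token.value:
            self.names.append(name_token.value)


if __name__ == "__main__":
    model = get_model("example.robot")  # hypothetical suite file
    collector = VariableNameCollector()
    collector.visit(model)
    print(collector.names)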


12 files changed (+87 -138 lines)


packages/core/src/robotcode/core/dataclasses.py

Lines changed: 2 additions & 2 deletions
@@ -128,7 +128,7 @@ def encode_case_for_field_name(obj: Any, field: dataclasses.Field) -> str:  # ty
 __decode_case_cache: Dict[Tuple[Type[Any], str], str] = {}


-def _decode_case_for_member_name(type: Type[_T], name: str) -> str:
+def _decode_case_for_member_name(type: Type[Any], name: str) -> str:
     r = __decode_case_cache.get((type, name), __NOT_SET)
     if r is __NOT_SET:
         if dataclasses.is_dataclass(type):
@@ -141,7 +141,7 @@ def _decode_case_for_member_name(type: Type[_T], name: str) -> str:

     if r is __NOT_SET:
         if hasattr(type, "_decode_case"):
-            r = str(type._decode_case(name))  # type: ignore[attr-defined]
+            r = str(type._decode_case(name))
         else:
             r = name

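
The Type[_T] to Type[Any] change above is what lets the type: ignore[attr-defined] comment go away: mypy cannot know that a class referenced through an unbound TypeVar has a _decode_case attribute, while attribute access on Any is unchecked, so the runtime hasattr() guard is enough. A minimal sketch of the idea (decode_case and the Example class are illustrative, not the project's actual API):

from typing import Any, Type


def decode_case(type: Type[Any], name: str) -> str:
    # With Type[Any], the hasattr() guard satisfies mypy; with Type[_T] the
    # attribute access below would need a "# type: ignore[attr-defined]".
    if hasattr(type, "_decode_case"):
        return str(type._decode_case(name))
    return name


class Example:
    # Hypothetical class opting in to the _decode_case hook.
    @classmethod
    def _decode_case(cls, name: str) -> str:
        return name.replace("-", "_")


if __name__ == "__main__":
    print(decode_case(Example, "some-field"))  # some_field
    print(decode_case(str, "other-field"))     # other-field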

packages/language_server/src/robotcode/language_server/robotframework/diagnostics/namespace.py

Lines changed: 64 additions & 120 deletions
@@ -24,9 +24,17 @@
     Set,
     Tuple,
     Union,
-    cast,
 )

+from robot.errors import VariableError
+from robot.libraries import STDLIBS
+from robot.parsing.lexer.tokens import Token
+from robot.parsing.model.blocks import Keyword, SettingSection, TestCase, VariableSection
+from robot.parsing.model.statements import Arguments, KeywordCall, KeywordName, Statement, Variable
+from robot.parsing.model.statements import LibraryImport as RobotLibraryImport
+from robot.parsing.model.statements import ResourceImport as RobotResourceImport
+from robot.parsing.model.statements import VariablesImport as RobotVariablesImport
+from robot.variables.search import is_scalar_assign, search_variable
 from robotcode.core.async_tools import Lock, async_event
 from robotcode.core.logging import LoggingDescriptor
 from robotcode.core.lsp.types import (
@@ -42,16 +50,15 @@
 )
 from robotcode.core.uri import Uri
 from robotcode.robot.utils import get_robot_version
-
-from ...common.text_document import TextDocument
-from ..languages import Languages
-from ..utils.ast_utils import (
-    Token,
+from robotcode.robot.utils.ast import (
     range_from_node,
     range_from_token,
     strip_variable_token,
     tokenize_variables,
 )
+
+from ...common.text_document import TextDocument
+from ..languages import Languages
 from ..utils.async_ast import Visitor
 from ..utils.match import eq_namespace
 from ..utils.variables import BUILTIN_VARIABLES
@@ -111,19 +118,14 @@ def get(self, source: str, model: ast.AST) -> List[VariableDefinition]:
         return self._results

     def visit_Section(self, node: ast.AST) -> None:  # noqa: N802
-        from robot.parsing.model.blocks import VariableSection
-
         if isinstance(node, VariableSection):
             self.generic_visit(node)

-    def visit_Variable(self, node: ast.AST) -> None:  # noqa: N802
-        from robot.parsing.lexer.tokens import Token as RobotToken
-        from robot.parsing.model.statements import Variable
-        from robot.variables import search_variable
-
-        variable = cast(Variable, node)
+    def visit_Variable(self, node: Variable) -> None:  # noqa: N802
+        name_token = node.get_token(Token.VARIABLE)
+        if name_token is None:
+            return

-        name_token = variable.get_token(RobotToken.VARIABLE)
         name = name_token.value

         if name is not None:
@@ -134,21 +136,21 @@ def visit_Variable(self, node: ast.AST) -> None:  # noqa: N802
             if name.endswith("="):
                 name = name[:-1].rstrip()

-            has_value = bool(variable.value)
+            has_value = bool(node.value)
             value = tuple(
-                s.replace("${CURDIR}", str(Path(self.source).parent).replace("\\", "\\\\")) for s in variable.value
+                s.replace("${CURDIR}", str(Path(self.source).parent).replace("\\", "\\\\")) for s in node.value
             )

             self._results.append(
                 VariableDefinition(
-                    name=variable.name,
+                    name=node.name,
                     name_token=strip_variable_token(
-                        RobotToken(name_token.type, name, name_token.lineno, name_token.col_offset, name_token.error)
+                        Token(name_token.type, name, name_token.lineno, name_token.col_offset, name_token.error)
                     ),
-                    line_no=variable.lineno,
-                    col_offset=variable.col_offset,
-                    end_line_no=variable.lineno,
-                    end_col_offset=variable.end_col_offset,
+                    line_no=node.lineno,
+                    col_offset=node.col_offset,
+                    end_line_no=node.lineno,
+                    end_col_offset=node.end_col_offset,
                     source=self.source,
                     has_value=has_value,
                     resolvable=True,
@@ -187,22 +189,17 @@ def visit_Keyword(self, node: ast.AST) -> None:  # noqa: N802
         finally:
             self.current_kw_doc = None

-    def visit_KeywordName(self, node: ast.AST) -> None:  # noqa: N802
-        from robot.parsing.lexer.tokens import Token as RobotToken
-        from robot.parsing.model.statements import KeywordName
-        from robot.variables.search import search_variable
-
+    def visit_KeywordName(self, node: KeywordName) -> None:  # noqa: N802
         from .model_helper import ModelHelperMixin

-        n = cast(KeywordName, node)
-        name_token = cast(Token, n.get_token(RobotToken.KEYWORD_NAME))
+        name_token = node.get_token(Token.KEYWORD_NAME)

         if name_token is not None and name_token.value:
             keyword = ModelHelperMixin.get_keyword_definition_at_token(self.library_doc, name_token)
             self.current_kw_doc = keyword

             for variable_token in filter(
-                lambda e: e.type == RobotToken.VARIABLE,
+                lambda e: e.type == Token.VARIABLE,
                 tokenize_variables(name_token, identifiers="$", ignore_errors=True),
             ):
                 if variable_token.value:
@@ -225,30 +222,24 @@ def visit_KeywordName(self, node: ast.AST) -> None:  # noqa: N802
                     )

     def get_variable_token(self, token: Token) -> Optional[Token]:
-        from robot.parsing.lexer.tokens import Token as RobotToken
-
         return next(
             (
                 v
                 for v in itertools.dropwhile(
-                    lambda t: t.type in RobotToken.NON_DATA_TOKENS,
+                    lambda t: t.type in Token.NON_DATA_TOKENS,
                     tokenize_variables(token, ignore_errors=True),
                 )
-                if v.type == RobotToken.VARIABLE
+                if v.type == Token.VARIABLE
             ),
             None,
         )

-    def visit_Arguments(self, node: ast.AST) -> None:  # noqa: N802
-        from robot.errors import VariableError
-        from robot.parsing.lexer.tokens import Token as RobotToken
-        from robot.parsing.model.statements import Arguments
-
+    def visit_Arguments(self, node: Arguments) -> None:  # noqa: N802
         args: List[str] = []
-        n = cast(Arguments, node)
-        arguments = n.get_tokens(RobotToken.ARGUMENT)

-        for argument_token in (cast(RobotToken, e) for e in arguments):
+        arguments = node.get_tokens(Token.ARGUMENT)
+
+        for argument_token in arguments:
             try:
                 argument = self.get_variable_token(argument_token)

@@ -278,14 +269,8 @@ def visit_Arguments(self, node: ast.AST) -> None:  # noqa: N802
             except VariableError:
                 pass

-    def visit_ExceptHeader(self, node: ast.AST) -> None:  # noqa: N802
-        from robot.errors import VariableError
-        from robot.parsing.lexer.tokens import Token as RobotToken
-        from robot.parsing.model.statements import ExceptHeader
-        from robot.variables import is_scalar_assign
-
-        n = cast(ExceptHeader, node)
-        variables = n.get_tokens(RobotToken.VARIABLE)[:1]
+    def visit_ExceptHeader(self, node: Statement) -> None:  # noqa: N802
+        variables = node.get_tokens(Token.VARIABLE)[:1]
         if variables and is_scalar_assign(variables[0].value):
             try:
                 variable = self.get_variable_token(variables[0])
@@ -304,23 +289,17 @@ def visit_ExceptHeader(self, node: ast.AST) -> None:  # noqa: N802
             except VariableError:
                 pass

-    def visit_KeywordCall(self, node: ast.AST) -> None:  # noqa: N802
-        from robot.errors import VariableError
-        from robot.parsing.lexer.tokens import Token as RobotToken
-        from robot.parsing.model.statements import KeywordCall
-
+    def visit_KeywordCall(self, node: KeywordCall) -> None:  # noqa: N802
         # TODO analyze "Set Local/Global/Suite Variable"

-        n = cast(KeywordCall, node)
-
-        for assign_token in n.get_tokens(RobotToken.ASSIGN):
+        for assign_token in node.get_tokens(Token.ASSIGN):
             variable_token = self.get_variable_token(assign_token)

             try:
                 if variable_token is not None:
                     if (
                         self.position is not None
-                        and self.position in range_from_node(n)
+                        and self.position in range_from_node(node)
                         and self.position > range_from_token(variable_token).end
                     ):
                         continue
@@ -339,21 +318,15 @@ def visit_KeywordCall(self, node: ast.AST) -> None:  # noqa: N802
             except VariableError:
                 pass

-    def visit_InlineIfHeader(self, node: ast.AST) -> None:  # noqa: N802
-        from robot.errors import VariableError
-        from robot.parsing.lexer.tokens import Token as RobotToken
-        from robot.parsing.model.statements import InlineIfHeader
-
-        n = cast(InlineIfHeader, node)
-
-        for assign_token in n.get_tokens(RobotToken.ASSIGN):
+    def visit_InlineIfHeader(self, node: Statement) -> None:  # noqa: N802
+        for assign_token in node.get_tokens(Token.ASSIGN):
             variable_token = self.get_variable_token(assign_token)

             try:
                 if variable_token is not None:
                     if (
                         self.position is not None
-                        and self.position in range_from_node(n)
+                        and self.position in range_from_node(node)
                         and self.position > range_from_token(variable_token).end
                     ):
                         continue
@@ -372,12 +345,8 @@ def visit_InlineIfHeader(self, node: ast.AST) -> None:  # noqa: N802
             except VariableError:
                 pass

-    def visit_ForHeader(self, node: ast.AST) -> None:  # noqa: N802
-        from robot.parsing.lexer.tokens import Token as RobotToken
-        from robot.parsing.model.statements import ForHeader
-
-        n = cast(ForHeader, node)
-        variables = n.get_tokens(RobotToken.VARIABLE)
+    def visit_ForHeader(self, node: Statement) -> None:  # noqa: N802
+        variables = node.get_tokens(Token.VARIABLE)
         for variable in variables:
             variable_token = self.get_variable_token(variable)
             if variable_token is not None and variable_token.value and variable_token.value not in self._results:
@@ -400,31 +369,23 @@ def get(self, source: str, model: ast.AST) -> List[Import]:
         return self._results

     def visit_Section(self, node: ast.AST) -> None:  # noqa: N802
-        from robot.parsing.model.blocks import SettingSection
-
         if isinstance(node, SettingSection):
             self.generic_visit(node)

-    def visit_LibraryImport(self, node: ast.AST) -> None:  # noqa: N802
-        from robot.parsing.lexer.tokens import Token as RobotToken
-        from robot.parsing.model.statements import LibraryImport as RobotLibraryImport
-
-        n = cast(RobotLibraryImport, node)
-        name = cast(RobotToken, n.get_token(RobotToken.NAME))
+    def visit_LibraryImport(self, node: RobotLibraryImport) -> None:  # noqa: N802
+        name = node.get_token(Token.NAME)

-        separator = n.get_token(RobotToken.WITH_NAME)
-        alias_token = n.get_tokens(RobotToken.NAME)[-1] if separator else None
+        separator = node.get_token(Token.WITH_NAME)
+        alias_token = node.get_tokens(Token.NAME)[-1] if separator else None

-        last_data_token = cast(
-            RobotToken, next(v for v in reversed(n.tokens) if v.type not in RobotToken.NON_DATA_TOKENS)
-        )
-        if n.name:
+        last_data_token = next(v for v in reversed(node.tokens) if v.type not in Token.NON_DATA_TOKENS)
+        if node.name:
             self._results.append(
                 LibraryImport(
-                    name=n.name,
+                    name=node.name,
                     name_token=name if name is not None else None,
-                    args=n.args,
-                    alias=n.alias,
+                    args=node.args,
+                    alias=node.alias,
                     alias_token=alias_token,
                     line_no=node.lineno,
                     col_offset=node.col_offset,
@@ -442,20 +403,14 @@ def visit_LibraryImport(self, node: ast.AST) -> None:  # noqa: N802
                 )
             )

-    def visit_ResourceImport(self, node: ast.AST) -> None:  # noqa: N802
-        from robot.parsing.lexer.tokens import Token as RobotToken
-        from robot.parsing.model.statements import ResourceImport as RobotResourceImport
-
-        n = cast(RobotResourceImport, node)
-        name = cast(RobotToken, n.get_token(RobotToken.NAME))
+    def visit_ResourceImport(self, node: RobotResourceImport) -> None:  # noqa: N802
+        name = node.get_token(Token.NAME)

-        last_data_token = cast(
-            RobotToken, next(v for v in reversed(n.tokens) if v.type not in RobotToken.NON_DATA_TOKENS)
-        )
-        if n.name:
+        last_data_token = next(v for v in reversed(node.tokens) if v.type not in Token.NON_DATA_TOKENS)
+        if node.name:
             self._results.append(
                 ResourceImport(
-                    name=n.name,
+                    name=node.name,
                     name_token=name if name is not None else None,
                     line_no=node.lineno,
                     col_offset=node.col_offset,
@@ -473,22 +428,16 @@ def visit_ResourceImport(self, node: ast.AST) -> None:  # noqa: N802
                 )
             )

-    def visit_VariablesImport(self, node: ast.AST) -> None:  # noqa: N802
-        from robot.parsing.lexer.tokens import Token as RobotToken
-        from robot.parsing.model.statements import VariablesImport as RobotVariablesImport
-
-        n = cast(RobotVariablesImport, node)
-        name = cast(RobotToken, n.get_token(RobotToken.NAME))
+    def visit_VariablesImport(self, node: RobotVariablesImport) -> None:  # noqa: N802
+        name = node.get_token(Token.NAME)

-        last_data_token = cast(
-            RobotToken, next(v for v in reversed(n.tokens) if v.type not in RobotToken.NON_DATA_TOKENS)
-        )
-        if n.name:
+        last_data_token = next(v for v in reversed(node.tokens) if v.type not in Token.NON_DATA_TOKENS)
+        if node.name:
             self._results.append(
                 VariablesImport(
-                    name=n.name,
+                    name=node.name,
                     name_token=name if name is not None else None,
-                    args=n.args,
+                    args=node.args,
                     line_no=node.lineno,
                     col_offset=node.col_offset,
                     end_line_no=last_data_token.lineno
@@ -926,9 +875,6 @@ async def yield_variables(
         position: Optional[Position] = None,
         skip_commandline_variables: bool = False,
     ) -> AsyncIterator[Tuple[VariableMatcher, VariableDefinition]]:
-        from robot.parsing.model.blocks import Keyword, TestCase
-        from robot.parsing.model.statements import Arguments
-
         yielded: Dict[VariableMatcher, VariableDefinition] = {}

         test_or_keyword_nodes = list(
@@ -2020,8 +1966,6 @@ def _get_keyword_from_libraries(self, name: str) -> Optional[KeywordDoc]:
     def _filter_stdlib_runner(
         self, entry1: Tuple[Optional[LibraryEntry], KeywordDoc], entry2: Tuple[Optional[LibraryEntry], KeywordDoc]
     ) -> List[Tuple[Optional[LibraryEntry], KeywordDoc]]:
-        from robot.libraries import STDLIBS
-
         stdlibs_without_remote = STDLIBS - {"Remote"}
         if entry1[0] is not None and entry1[0].name in stdlibs_without_remote:
             standard, custom = entry1, entry2
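
Note that visit_ExceptHeader, visit_InlineIfHeader and visit_ForHeader above are now annotated with the Statement base class rather than their concrete header classes, since get_token() and get_tokens() are defined on Statement and that is all these methods use. A small sketch of that kind of token lookup, using Robot Framework's public parsing API rather than the project's async visitor (ForVariablePrinter and suite.robot are made up for illustration):

from robot.api import get_model
from robot.api.parsing import ModelVisitor
from robot.parsing.lexer.tokens import Token
from robot.parsing.model.statements import Statement


class ForVariablePrinter(ModelVisitor):
    # get_tokens() is available on every Statement, so the annotation does not
    # have to name the concrete ForHeader class.
    def visit_ForHeader(self, node: Statement) -> None:  # noqa: N802
        for variable in node.get_tokens(Token.VARIABLE):
            print(variable.value)


if __name__ == "__main__":
    # Hypothetical suite containing e.g.:  FOR    ${item}    IN    @{items}
    ForVariablePrinter().visit(get_model("suite.robot"))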
