@@ -751,7 +751,8 @@ async def _collect_internal(
     ) -> Union[SemanticTokens, SemanticTokensPartialResult, None]:
 
         from robot.parsing.lexer.tokens import Token as RobotToken
-        from robot.parsing.model.statements import Fixture, KeywordCall
+        from robot.parsing.model.statements import Fixture, KeywordCall, LibraryImport
+        from robot.utils.escaping import split_from_equals
 
         data = []
         last_line = 0
@@ -760,6 +761,44 @@ async def _collect_internal(
         async def get_tokens() -> AsyncGenerator[Tuple[Token, ast.AST], None]:
             async for node in async_ast.iter_nodes(model):
                 if isinstance(node, HasTokens):
+                    if isinstance(node, LibraryImport):
+                        lib_doc = await namespace.get_imported_library_libdoc(node.name, node.args, node.alias)
+                        kw_doc = lib_doc.inits.keywords[0] if lib_doc and lib_doc.inits else None
+                        if lib_doc is not None:
+
+                            for token in node.tokens:
+                                if token.type == RobotToken.ARGUMENT:
+                                    name, value = split_from_equals(token.value)
+                                    if (
+                                        value is not None
+                                        and kw_doc is not None
+                                        and kw_doc.args
+                                        and any(
+                                            v
+                                            for v in kw_doc.args
+                                            if v.kind == KeywordArgumentKind.VAR_NAMED or v.name == name
+                                        )
+                                    ):
+                                        length = len(name)
+                                        yield RobotToken(
+                                            ROBOT_NAMED_ARGUMENT, name, token.lineno, token.col_offset
+                                        ), node
+
+                                        yield RobotToken(
+                                            ROBOT_OPERATOR, "=", token.lineno, token.col_offset + length
+                                        ), node
+                                        yield RobotToken(
+                                            token.type,
+                                            value,
+                                            token.lineno,
+                                            token.col_offset + length + 1,
+                                            token.error,
+                                        ), node
+
+                                        continue
+
+                                yield token, node
+                            continue
                     if isinstance(node, (KeywordCall, Fixture)):
                         kw_token = cast(
                             Token,
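A minimal sketch, not part of the commit, of the splitting the new LibraryImport branch performs: Robot Framework's split_from_equals (from robot.utils.escaping) returns (name, value), with value None when the argument has no unescaped "=", and the branch then replaces the single ARGUMENT token with a named-argument token, an "=" operator token, and the remaining value token at shifted column offsets. The token value and column offset below are made up for illustration.

    from robot.utils.escaping import split_from_equals

    # Hypothetical ARGUMENT token of a Library import row, e.g. "timeout=30".
    token_value = "timeout=30"
    col_offset = 23  # hypothetical start column of that token

    name, value = split_from_equals(token_value)
    # name == "timeout", value == "30"; value would be None without an unescaped "=".

    if value is not None:
        length = len(name)
        # Same offset arithmetic as in the diff:
        #   named argument name at col_offset
        #   "=" operator at col_offset + length
        #   argument value at col_offset + length + 1
        print((name, col_offset), ("=", col_offset + length), (value, col_offset + length + 1))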