@@ -3,7 +3,6 @@
 import re
 import sys
 import tokenize
-from io import BytesIO
 from concurrent.futures import ThreadPoolExecutor
 import importmagic
 from pyls import hookimpl, lsp, _utils
@@ -21,6 +20,19 @@
 _index_cache = {}
 
 
+class _SourceReader():
+    # Wraps a source string in a readline() interface for tokenize
+    def __init__(self, source):
+        self.lines = re.findall(r'[^\n]*\n', source)
+        # Reversed so readline() can pop lines in order
+        self.lines.reverse()
+
+    def readline(self):
+        if self.lines:
+            return self.lines.pop()
+        return ''
+
+
 def _build_index(paths):
     """Build index of symbols from python modules.
     """
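For context, a minimal sketch of how a readline-style callable such as `_SourceReader.readline` drives the stdlib tokenizer; the sample source string is invented for illustration:

    import tokenize

    source = "x = 1\nprint(x)\n"  # made-up input
    reader = _SourceReader(source)
    for tok in tokenize.generate_tokens(reader.readline):
        print(tok[0], repr(tok[1]))  # token type and token string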
@@ -66,12 +78,11 @@ def _get_imports_list(source, index=None):
 
 def _tokenize(source):
     """Tokenize python source code.
+    Returns only NAME tokens.
     """
-    stream = BytesIO(source.encode())
-    tokens = tokenize.tokenize(stream.readline)
-    if tokens is None:
-        return []
-    return list(tokens)
+    readline = _SourceReader(source).readline
+    filter_name = lambda token: token[0] == tokenize.NAME
+    return filter(filter_name, tokenize.generate_tokens(readline))
 
 
 def _search_symbol(source, symbol):
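Assuming the patched helpers above, the new `_tokenize` yields only NAME tokens, so for a small made-up snippet the token strings would come out as:

    tokens = _tokenize("import os\nos.path.join(a, b)\n")
    print([token[1] for token in tokens])
    # ['import', 'os', 'os', 'path', 'join', 'a', 'b']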
@@ -94,8 +105,8 @@ def _search_symbol(source, symbol):
         }
     }
     """
-    symbol_tokens = _tokenize(symbol)
-    source_tokens = _tokenize(source)
+    symbol_tokens = list(_tokenize(symbol))
+    source_tokens = list(_tokenize(source))
 
     get_str = lambda token: token[1]
     symbol_tokens_str = list(map(get_str, symbol_tokens))
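Mapping both token streams to plain strings suggests the symbol is located by matching its NAME-token sequence against the source's; a rough sketch of that idea, with a hypothetical helper name (the actual matching code falls outside this hunk):

    def _find_token_sequence(source_tokens_str, symbol_tokens_str):
        # Hypothetical: index where the symbol's NAME-token
        # sequence first appears in the source's sequence.
        n = len(symbol_tokens_str)
        for i in range(len(source_tokens_str) - n + 1):
            if source_tokens_str[i:i + n] == symbol_tokens_str:
                return i
        return -1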