1 file changed, +5 -7 lines changed

@@ -81,8 +81,7 @@ def _tokenize(source):
     Returns only NAME tokens.
     """
     readline = _SourceReader(source).readline
-    filter_name = lambda token: token[0] == tokenize.NAME
-    return filter(filter_name, tokenize.generate_tokens(readline))
+    return [token for token in tokenize.generate_tokens(readline) if token[0] == tokenize.NAME]
 
 
 def _search_symbol(source, symbol):
@@ -105,12 +104,11 @@ def _search_symbol(source, symbol):
         }
     }
     """
-    symbol_tokens = list(_tokenize(symbol))
-    source_tokens = list(_tokenize(source))
+    symbol_tokens = _tokenize(symbol)
+    source_tokens = _tokenize(source)
 
-    get_str = lambda token: token[1]
-    symbol_tokens_str = list(map(get_str, symbol_tokens))
-    source_tokens_str = list(map(get_str, source_tokens))
+    symbol_tokens_str = [token[1] for token in symbol_tokens]
+    source_tokens_str = [token[1] for token in source_tokens]
 
     symbol_len = len(symbol_tokens)
     locations = []