1 file changed: 5 additions (+), 7 deletions (−)

@@ -81,8 +81,7 @@ def _tokenize(source):
     Returns only NAME tokens.
     """
     readline = _SourceReader(source).readline
-    filter_name = lambda token: token[0] == tokenize.NAME
-    return filter(filter_name, tokenize.generate_tokens(readline))
+    return [token for token in tokenize.generate_tokens(readline) if token[0] == tokenize.NAME]
 
 
 def _search_symbol(source, symbol):
@@ -105,12 +104,11 @@ def _search_symbol(source, symbol):
         }
     }
     """
-    symbol_tokens = list(_tokenize(symbol))
-    source_tokens = list(_tokenize(source))
+    symbol_tokens = _tokenize(symbol)
+    source_tokens = _tokenize(source)
 
-    get_str = lambda token: token[1]
-    symbol_tokens_str = list(map(get_str, symbol_tokens))
-    source_tokens_str = list(map(get_str, source_tokens))
+    symbol_tokens_str = [token[1] for token in symbol_tokens]
+    source_tokens_str = [token[1] for token in source_tokens]
 
     symbol_len = len(symbol_tokens)
     locations = []
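
Note (not part of the patch): in Python 3, filter() and map() return lazy iterators, so the previous _tokenize result could not be passed to len() directly and call sites had to wrap it in list(...). Returning a list comprehension makes the result eager, which is why the list(...) wrappers in _search_symbol can be dropped. A minimal sketch of the patched _tokenize, using io.StringIO as a stand-in for the repository's _SourceReader helper:

import io
import tokenize

def _tokenize(source):
    """Return only NAME tokens, as in the patched version."""
    # io.StringIO is a stand-in here for the repository's _SourceReader helper.
    readline = io.StringIO(source).readline
    return [token for token in tokenize.generate_tokens(readline)
            if token[0] == tokenize.NAME]

tokens = _tokenize("x = foo(y)\n")
print(len(tokens))                      # 3 -- len() works because a list is returned
print([token[1] for token in tokens])   # ['x', 'foo', 'y']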