Improve performance of --nth option (#90)

This commit is contained in:
Junegunn Choi 2014-08-15 03:02:07 +09:00
parent 1e07b3b1c2
commit 2d00abc7cb

35
fzf
View File

@@ -7,7 +7,7 @@
# / __/ / /_/ __/ # / __/ / /_/ __/
# /_/ /___/_/ Fuzzy finder for your shell # /_/ /___/_/ Fuzzy finder for your shell
# #
# Version: 0.8.7 (Jul 27, 2014) # Version: 0.8.7 (Aug 15, 2014)
# #
# Author: Junegunn Choi # Author: Junegunn Choi
# URL: https://github.com/junegunn/fzf # URL: https://github.com/junegunn/fzf
@@ -1102,26 +1102,29 @@ class FZF
def tokenize str def tokenize str
@tokens_cache[str] ||= @tokens_cache[str] ||=
unless @delim begin
# AWK default unless @delim
prefix_length = str[/^\s+/].length rescue 0 # AWK default
[prefix_length, (str.strip.scan(/\S+\s*/) rescue [])] prefix_length = str.index(/\S/) || 0
else tokens = str.strip.scan(/\S+\s*/) rescue []
prefix_length = 0 else
[prefix_length, (str.scan(@delim) rescue [])] prefix_length = 0
tokens = str.scan(@delim) rescue []
end
@nth.map { |n|
(part = tokens[n]) &&
[prefix_length + (tokens[0...(n.begin)] || []).join.length,
part.join.sub(/\s+$/, '')]
}.compact
end end
end end
def do_match str, pat def do_match str, pat
if @nth if @nth
prefix_length, tokens = tokenize str tokenize(str).each do |pair|
prefix_length, token = pair
@nth.each do |n| if md = token.match(pat) rescue nil
if (range = tokens[n]) && (token = range.join) && return MatchData.new(md.offset(0).map { |o| o + prefix_length })
(md = token.sub(/\s+$/, '').match(pat) rescue nil)
prefix_length += (tokens[0...(n.begin)] || []).join.length
offset = md.offset(0).map { |o| o + prefix_length }
return MatchData.new(offset)
end end
end end
nil nil