Improve performance by not checking empty strings or strings not containing ':' for URLs (issue #2374).
This commit is contained in:
parent
0711953beb
commit
4e024291a4
1 changed files with 1 additions and 1 deletions
|
@ -30,7 +30,7 @@ sanitize html string based on whitelist
|
|||
scrubber_link = Loofah::Scrubber.new do |node|
|
||||
|
||||
# wrap plain-text URLs in <a> tags
|
||||
if node.is_a?(Nokogiri::XML::Text) && node.ancestors.map(&:name).exclude?('a')
|
||||
if node.is_a?(Nokogiri::XML::Text) && node.content.present? && node.content.include?(':') && node.ancestors.map(&:name).exclude?('a')
|
||||
urls = URI.extract(node.content, LINKABLE_URL_SCHEMES)
|
||||
.map { |u| u.sub(/[,.]$/, '') } # URI::extract captures trailing dots/commas
|
||||
.reject { |u| u.match?(/^[^:]+:$/) } # URI::extract will match, e.g., 'tel:'
|
||||
|
|
Loading…
Reference in a new issue