2021-06-01 12:20:20 +00:00
# Copyright (C) 2012-2021 Zammad Foundation, http://zammad-foundation.org/
2017-02-01 11:48:50 +00:00
class HtmlSanitizer
2018-08-01 09:25:38 +00:00
LINKABLE_URL_SCHEMES = URI . scheme_list . keys . map ( & :downcase ) - [ 'mailto' ] + [ 'tel' ]
2019-08-16 13:15:07 +00:00
PROCESSING_TIMEOUT = 20
2019-01-18 15:33:07 +00:00
UNPROCESSABLE_HTML_MSG = 'This message cannot be displayed due to HTML processing issues. Download the raw message below and open it via an Email client if you still wish to view it.' . freeze
2017-02-01 11:48:50 +00:00
2017-03-10 06:49:01 +00:00
= begin
satinize html string based on whiltelist
string = HtmlSanitizer . strict ( string , external )
= end
2019-01-18 15:33:07 +00:00
def self . strict ( string , external = false , timeout : true )
Timeout . timeout ( timeout ? PROCESSING_TIMEOUT : nil ) do
2021-03-16 08:59:32 +00:00
@fqdn = Setting . get ( 'fqdn' )
http_type = Setting . get ( 'http_type' )
web_app_url_prefix = " #{ http_type } :// #{ @fqdn } / \# " . downcase
2017-02-02 18:49:34 +00:00
2019-01-18 15:33:07 +00:00
# config
tags_remove_content = Rails . configuration . html_sanitizer_tags_remove_content
tags_quote_content = Rails . configuration . html_sanitizer_tags_quote_content
2021-07-12 13:18:31 +00:00
tags_allowlist = Rails . configuration . html_sanitizer_tags_allowlist
attributes_allowlist = Rails . configuration . html_sanitizer_attributes_allowlist
css_properties_allowlist = Rails . configuration . html_sanitizer_css_properties_allowlist
css_values_blocklist = Rails . application . config . html_sanitizer_css_values_blocklist
2018-09-25 09:43:52 +00:00
2021-07-12 13:18:31 +00:00
# We allowlist yahoo_quoted because Yahoo Mail marks quoted email content using
2019-01-18 15:33:07 +00:00
# <div class='yahoo_quoted'> and we rely on this class to identify quoted messages
2021-07-12 13:18:31 +00:00
classes_allowlist = %w[ js-signatureMarker yahoo_quoted ]
2019-01-18 15:33:07 +00:00
attributes_2_css = %w[ width height ]
2017-02-01 11:48:50 +00:00
2021-01-08 15:02:19 +00:00
# remove tags with subtree
scrubber_tag_remove = Loofah :: Scrubber . new do | node |
next if tags_remove_content . exclude? ( node . name )
2018-01-02 00:36:05 +00:00
2021-01-08 15:02:19 +00:00
node . remove
Loofah :: Scrubber :: STOP
end
string = Loofah . fragment ( string ) . scrub! ( scrubber_tag_remove ) . to_s
2017-04-26 09:05:58 +00:00
2021-01-08 15:02:19 +00:00
# remove tag, insert quoted content
scrubber_wipe_quote_content = Loofah :: Scrubber . new do | node |
next if tags_quote_content . exclude? ( node . name )
2017-02-01 11:48:50 +00:00
2021-01-08 15:02:19 +00:00
string = html_decode ( node . content )
text = Nokogiri :: XML :: Text . new ( string , node . document )
node . add_next_sibling ( text )
node . remove
Loofah :: Scrubber :: STOP
2017-03-10 06:49:01 +00:00
end
2021-01-08 15:02:19 +00:00
string = Loofah . fragment ( string ) . scrub! ( scrubber_wipe_quote_content ) . to_s
2017-03-10 06:49:01 +00:00
2019-01-18 15:33:07 +00:00
scrubber_wipe = Loofah :: Scrubber . new do | node |
2017-02-01 11:48:50 +00:00
2019-01-18 15:33:07 +00:00
# replace tags, keep subtree
2021-07-12 13:18:31 +00:00
if tags_allowlist . exclude? ( node . name )
2019-01-18 15:33:07 +00:00
node . replace node . children . to_s
Loofah :: Scrubber :: STOP
end
2018-10-09 06:17:41 +00:00
2019-01-18 15:33:07 +00:00
# prepare src attribute
if node [ 'src' ]
2021-01-08 15:02:19 +00:00
src = cleanup_target ( CGI . unescape ( node [ 'src' ] ) )
2021-05-12 11:37:44 +00:00
if src =~ %r{ (javascript|livescript|vbscript): }i || src . downcase . start_with? ( 'http' , 'ftp' , '//' )
2019-01-18 15:33:07 +00:00
node . remove
Loofah :: Scrubber :: STOP
2017-03-10 06:49:01 +00:00
end
2017-10-01 12:25:52 +00:00
end
2019-01-18 15:33:07 +00:00
# clean class / only use allowed classes
if node [ 'class' ]
2021-05-12 11:37:44 +00:00
classes = node [ 'class' ] . gsub ( %r{ \ t| \ n| \ r } , '' ) . split
2019-01-18 15:33:07 +00:00
class_new = ''
classes . each do | local_class |
2021-07-12 13:18:31 +00:00
next if classes_allowlist . exclude? ( local_class . to_s . strip )
2019-01-18 15:33:07 +00:00
if class_new != ''
class_new += ' '
end
class_new += local_class
end
2020-11-05 16:31:00 +00:00
if class_new == ''
2019-01-18 15:33:07 +00:00
node . delete ( 'class' )
2020-11-05 16:31:00 +00:00
else
node [ 'class' ] = class_new
2019-01-18 15:33:07 +00:00
end
2017-03-10 06:49:01 +00:00
end
2019-01-18 15:33:07 +00:00
# move style attributes to css attributes
attributes_2_css . each do | key |
next if ! node [ key ]
2018-10-09 06:17:41 +00:00
2019-01-18 15:33:07 +00:00
if node [ 'style' ] . blank?
node [ 'style' ] = ''
else
node [ 'style' ] += ';'
end
value = node [ key ]
node . delete ( key )
next if value . blank?
2018-10-09 06:17:41 +00:00
2021-05-12 11:37:44 +00:00
value += 'px' if ! value . match? ( %r{ %|px|em }i )
2019-01-18 15:33:07 +00:00
node [ 'style' ] += " #{ key } : #{ value } "
end
2017-03-10 06:49:01 +00:00
2019-01-18 15:33:07 +00:00
# clean style / only use allowed style properties
if node [ 'style' ]
2021-05-12 11:37:44 +00:00
pears = node [ 'style' ] . downcase . gsub ( %r{ \ t| \ n| \ r } , '' ) . split ( ';' )
2019-01-18 15:33:07 +00:00
style = ''
pears . each do | local_pear |
prop = local_pear . split ( ':' )
next if ! prop [ 0 ]
2018-10-09 06:17:41 +00:00
2019-01-18 15:33:07 +00:00
key = prop [ 0 ] . strip
2021-07-12 13:18:31 +00:00
next if css_properties_allowlist . exclude? ( node . name )
next if css_properties_allowlist [ node . name ] . exclude? ( key )
next if css_values_blocklist [ node . name ] & . include? ( local_pear . gsub ( %r{ [[:space:]] } , '' ) . strip )
2018-10-09 06:17:41 +00:00
2019-01-18 15:33:07 +00:00
style += " #{ local_pear } ; "
end
node [ 'style' ] = style
if style == ''
node . delete ( 'style' )
end
2017-02-01 11:48:50 +00:00
end
2019-01-18 15:33:07 +00:00
# scan for invalid link content
%w[ href style ] . each do | attribute_name |
next if ! node [ attribute_name ]
2018-10-09 06:17:41 +00:00
2019-01-18 15:33:07 +00:00
href = cleanup_target ( node [ attribute_name ] )
2021-05-12 11:37:44 +00:00
next if ! href . match? ( %r{ (javascript|livescript|vbscript): }i )
2018-10-09 06:17:41 +00:00
2019-01-18 15:33:07 +00:00
node . delete ( attribute_name )
end
2017-02-02 18:49:34 +00:00
2021-07-12 13:18:31 +00:00
# remove attributes if not allowlisted
2019-01-18 15:33:07 +00:00
node . each do | attribute , _value |
attribute_name = attribute . downcase
2021-07-12 13:18:31 +00:00
next if attributes_allowlist [ :all ] . include? ( attribute_name ) || attributes_allowlist [ node . name ] & . include? ( attribute_name )
2018-10-09 06:17:41 +00:00
2019-01-18 15:33:07 +00:00
node . delete ( attribute )
end
2017-02-02 18:49:34 +00:00
2017-03-10 06:49:01 +00:00
end
2017-04-26 06:38:46 +00:00
2019-01-18 15:33:07 +00:00
done = true
while done
new_string = Loofah . fragment ( string ) . scrub! ( scrubber_wipe ) . to_s
if string == new_string
done = false
end
string = new_string
2017-04-26 06:38:46 +00:00
end
2017-04-26 09:05:58 +00:00
2021-01-08 15:02:19 +00:00
scrubber_link = Loofah :: Scrubber . new do | node |
2020-10-27 07:14:20 +00:00
2021-01-08 15:02:19 +00:00
# wrap plain-text URLs in <a> tags
if node . is_a? ( Nokogiri :: XML :: Text ) && node . content . present? && node . content . include? ( ':' ) && node . ancestors . map ( & :name ) . exclude? ( 'a' )
urls = URI . extract ( node . content , LINKABLE_URL_SCHEMES )
2021-09-30 07:18:16 +00:00
. map { | u | u . sub ( %r{ [,.]$ } , '' ) } # URI::extract captures trailing dots/commas
. grep_v ( %r{ ^[^:]+:$ } ) # URI::extract will match, e.g., 'tel:'
2021-01-08 15:02:19 +00:00
next if urls . blank?
add_link ( node . content , urls , node )
end
# prepare links
if node [ 'href' ]
href = cleanup_target ( node [ 'href' ] , keep_spaces : true )
2021-05-12 11:37:44 +00:00
href_without_spaces = href . gsub ( %r{ [[:space:]] } , '' )
2021-01-08 15:02:19 +00:00
if external && href_without_spaces . present? && ! href_without_spaces . downcase . start_with? ( 'mailto:' ) && ! href_without_spaces . downcase . start_with? ( '//' ) && href_without_spaces . downcase !~ %r{ ^. { 1,6 } ://.+? }
node [ 'href' ] = " http:// #{ node [ 'href' ] } "
href = node [ 'href' ]
2021-05-12 11:37:44 +00:00
href_without_spaces = href . gsub ( %r{ [[:space:]] } , '' )
2021-01-08 15:02:19 +00:00
end
2021-05-12 11:37:44 +00:00
next if ! CGI . unescape ( href_without_spaces ) . utf8_encode ( fallback : :read_as_sanitized_binary ) . gsub ( %r{ [[:space:]] } , '' ) . downcase . start_with? ( 'http' , 'ftp' , '//' )
2021-01-08 15:02:19 +00:00
node . set_attribute ( 'href' , href )
node . set_attribute ( 'rel' , 'nofollow noreferrer noopener' )
2021-03-16 08:59:32 +00:00
# do not "target=_blank" WebApp URLs (e.g. mentions)
if ! href . downcase . start_with? ( web_app_url_prefix )
node . set_attribute ( 'target' , '_blank' )
end
2021-01-08 15:02:19 +00:00
end
if node . name == 'a' && node [ 'href' ] . blank?
node . replace node . children . to_s
Loofah :: Scrubber :: STOP
end
# check if href is different to text
if node . name == 'a' && ! url_same? ( node [ 'href' ] , node . text ) && node [ 'title' ] . blank?
node [ 'title' ] = node [ 'href' ]
end
2020-10-27 07:14:20 +00:00
end
2019-01-18 15:33:07 +00:00
Loofah . fragment ( string ) . scrub! ( scrubber_link ) . to_s
end
2019-06-28 11:38:49 +00:00
rescue Timeout :: Error
2019-08-16 13:15:07 +00:00
Rails . logger . error " Could not process string via HtmlSanitizer.strict in #{ PROCESSING_TIMEOUT } seconds. Current state: #{ string } "
2019-01-18 15:33:07 +00:00
UNPROCESSABLE_HTML_MSG
2017-02-01 11:48:50 +00:00
end
=begin

cleanup html string:

 * remove empty nodes (p, div, span, table)
 * remove nodes in general (keep content - span)

  string = HtmlSanitizer.cleanup(string)

=end

  # NOTE: mutates the given string in place (gsub!/delete!) before running
  # the structural cleanup passes.
  def self.cleanup(string, timeout: true)
    Timeout.timeout(timeout ? PROCESSING_TIMEOUT : nil) do
      # strip Office-style namespaced tags like <o:p> / </o:p>
      string.gsub!(%r{<[A-z]:[A-z]>}, '')
      string.gsub!(%r{</[A-z]:[A-z]>}, '')
      string.delete!("\t")

      # normalize all new lines
      string.gsub!(%r{(\n\r|\r\r\n|\r\n|\n)}, "\n")

      # collapse multiple empty lines into one
      string.gsub!(%r{\n\n\n+}, "\n\n")

      string = cleanup_structure(string, 'pre')
      string = cleanup_structure(string)
      string
    end
  rescue Timeout::Error
    Rails.logger.error "Could not process string via HtmlSanitizer.cleanup in #{PROCESSING_TIMEOUT} seconds. Current state: #{string}"
    UNPROCESSABLE_HTML_MSG
  end
2021-01-08 15:02:19 +00:00
def self . remove_last_empty_node ( node , remove_empty_nodes , remove_empty_last_nodes )
if node . children . present?
if node . children . size == 1
local_name = node . name
child = node . children . first
# replace not needed node (parent <- child)
if local_name == child . name && node . attributes . present? && node . children . first . attributes . blank?
local_node_child = node . children . first
node . attributes . each do | k |
local_node_child . set_attribute ( k [ 0 ] , k [ 1 ] )
end
node . replace local_node_child . to_s
Loofah :: Scrubber :: STOP
2018-10-09 06:17:41 +00:00
2021-01-08 15:02:19 +00:00
# replace not needed node (parent replace with child node)
elsif ( local_name == 'span' || local_name == child . name ) && node . attributes . blank?
node . replace node . children . to_s
Loofah :: Scrubber :: STOP
end
else
2021-01-04 12:57:57 +00:00
2021-01-08 15:02:19 +00:00
# loop through nodes
node . children . each do | local_node |
remove_last_empty_node ( local_node , remove_empty_nodes , remove_empty_last_nodes )
end
end
# remove empty nodes
elsif ( remove_empty_nodes . include? ( node . name ) || remove_empty_last_nodes . include? ( node . name ) ) && node . content . blank? && node . attributes . blank?
node . remove
2017-03-10 06:49:01 +00:00
Loofah :: Scrubber :: STOP
end
2017-02-02 18:49:34 +00:00
end
2017-04-28 10:16:15 +00:00
def self . cleanup_structure ( string , type = 'all' )
remove_empty_nodes = if type == 'pre'
2017-11-23 08:09:44 +00:00
%w[ span ]
2017-04-28 10:16:15 +00:00
else
2017-11-23 08:09:44 +00:00
%w[ p div span small table ]
2017-04-28 10:16:15 +00:00
end
2017-11-23 08:09:44 +00:00
remove_empty_last_nodes = %w[ b i u small table ]
2017-03-10 06:49:01 +00:00
2017-03-21 08:02:15 +00:00
# remove last empty nodes and empty -not needed- parrent nodes
scrubber_structure = Loofah :: Scrubber . new do | node |
2021-01-08 15:02:19 +00:00
remove_last_empty_node ( node , remove_empty_nodes , remove_empty_last_nodes )
2017-03-21 08:02:15 +00:00
end
done = true
while done
new_string = Loofah . fragment ( string ) . scrub! ( scrubber_structure ) . to_s
if string == new_string
done = false
2017-03-10 06:49:01 +00:00
end
2017-03-21 08:02:15 +00:00
string = new_string
end
scrubber_cleanup = Loofah :: Scrubber . new do | node |
2017-03-10 06:49:01 +00:00
# remove not needed new lines
2020-10-22 13:57:01 +00:00
if node . instance_of? ( Nokogiri :: XML :: Text )
2020-09-30 09:07:01 +00:00
if ! node . parent || ( node . parent . name != 'pre' && node . parent . name != 'code' ) # rubocop:disable Style/SoleNestedConditional
2017-03-10 06:49:01 +00:00
content = node . content
if content
if content != ' ' && content != " \n "
2021-05-12 11:37:44 +00:00
content . gsub! ( %r{ [[:space:]]+ } , ' ' )
2017-03-10 06:49:01 +00:00
end
if node . previous
if node . previous . name == 'div' || node . previous . name == 'p'
content . strip!
end
2017-03-21 14:21:01 +00:00
elsif node . parent && ! node . previous && ( ! node . next || node . next . name == 'div' || node . next . name == 'p' || node . next . name == 'br' )
2017-03-10 06:49:01 +00:00
if ( node . parent . name == 'div' || node . parent . name == 'p' ) && content != ' ' && content != " \n "
content . strip!
end
end
node . content = content
end
end
end
end
2017-03-21 08:02:15 +00:00
Loofah . fragment ( string ) . scrub! ( scrubber_cleanup ) . to_s
2017-03-10 06:49:01 +00:00
end
def self . add_link ( content , urls , node )
2017-11-21 07:24:03 +00:00
if urls . blank?
2017-03-10 06:49:01 +00:00
text = Nokogiri :: XML :: Text . new ( content , node . document )
node . add_next_sibling ( text )
return
end
url = urls . shift
2021-05-12 11:37:44 +00:00
if content =~ %r{ ^(.*) #{ Regexp . quote ( url ) } (.*)$ }mx
2017-03-10 06:49:01 +00:00
pre = $1
post = $2
2021-05-12 11:37:44 +00:00
if url . match? ( %r{ ^www }i )
2017-03-10 06:49:01 +00:00
url = " http:// #{ url } "
end
a = Nokogiri :: XML :: Node . new 'a' , node . document
a [ 'href' ] = url
2017-04-17 23:20:47 +00:00
a [ 'rel' ] = 'nofollow noreferrer noopener'
2017-03-10 06:49:01 +00:00
a [ 'target' ] = '_blank'
a . content = url
if node . class != Nokogiri :: XML :: Text
text = Nokogiri :: XML :: Text . new ( pre , node . document )
node . add_next_sibling ( text ) . add_next_sibling ( a )
return if post . blank?
2018-10-09 06:17:41 +00:00
2017-03-10 06:49:01 +00:00
add_link ( post , urls , a )
return
end
node . content = pre
node . add_next_sibling ( a )
return if post . blank?
2018-10-09 06:17:41 +00:00
2017-03-10 06:49:01 +00:00
add_link ( post , urls , a )
end
2017-11-23 08:09:44 +00:00
true
2017-03-10 06:49:01 +00:00
end
2017-03-22 06:55:33 +00:00
def self . html_decode ( string )
string . gsub ( '&' , '&' ) . gsub ( '<' , '<' ) . gsub ( '>' , '>' ) . gsub ( '"' , '"' ) . gsub ( ' ' , ' ' )
end
2018-06-07 10:38:09 +00:00
def self . cleanup_target ( string , ** options )
2020-07-13 06:38:11 +00:00
cleaned_string = string . utf8_encode ( fallback : :read_as_sanitized_binary )
2021-05-12 11:37:44 +00:00
cleaned_string = cleaned_string . gsub ( %r{ [[:space:]] } , '' ) if ! options [ :keep_spaces ]
2018-06-07 10:38:09 +00:00
cleaned_string = cleaned_string . strip
. delete ( " \t \n \r \ u0000 " )
. gsub ( %r{ / \ *.*? \ */ } , '' )
2021-05-12 11:37:44 +00:00
. gsub ( %r{ <!--.*?--> } , '' )
2018-06-07 10:38:09 +00:00
2018-05-24 12:46:22 +00:00
sanitize_attachment_disposition ( cleaned_string )
end
def self . sanitize_attachment_disposition ( url )
2018-11-26 14:57:40 +00:00
@fqdn || = Setting . get ( 'fqdn' )
2018-05-24 12:46:22 +00:00
uri = URI ( url )
2018-11-26 14:57:40 +00:00
if uri . host == @fqdn && uri . query . present?
2018-06-07 10:38:09 +00:00
params = CGI . parse ( uri . query || '' )
. tap { | p | p . merge! ( 'disposition' = > 'attachment' ) if p . include? ( 'disposition' ) }
uri . query = URI . encode_www_form ( params )
2018-05-24 12:46:22 +00:00
end
uri . to_s
2018-09-24 09:21:30 +00:00
rescue
2018-05-24 12:46:22 +00:00
url
2017-03-10 06:49:01 +00:00
end
def self . url_same? ( url_new , url_old )
2021-05-12 11:37:44 +00:00
url_new = CGI . unescape ( url_new . to_s ) . utf8_encode ( fallback : :read_as_sanitized_binary ) . downcase . delete_suffix ( '/' ) . gsub ( %r{ [[:space:]]| \ t| \ n| \ r } , '' ) . strip
url_old = CGI . unescape ( url_old . to_s ) . utf8_encode ( fallback : :read_as_sanitized_binary ) . downcase . delete_suffix ( '/' ) . gsub ( %r{ [[:space:]]| \ t| \ n| \ r } , '' ) . strip
2017-03-22 06:55:33 +00:00
url_new = html_decode ( url_new ) . sub ( '/?' , '?' )
url_old = html_decode ( url_old ) . sub ( '/?' , '?' )
2017-03-10 06:49:01 +00:00
return true if url_new == url_old
2017-11-23 08:09:44 +00:00
return true if url_old == " http:// #{ url_new } "
return true if url_new == " http:// #{ url_old } "
return true if url_old == " https:// #{ url_new } "
return true if url_new == " https:// #{ url_old } "
2018-10-09 06:17:41 +00:00
2017-03-10 06:49:01 +00:00
false
2017-02-02 18:49:34 +00:00
end
=begin

replace inline images with cid images

  string = HtmlSanitizer.replace_inline_images(article.body)

=end

  # Returns [sanitized_html, attachments_inline]: every base64 data-URL
  # <img> is rewritten to a cid: reference, and the decoded image data is
  # collected as an attachment hash with inline Content-Disposition.
  def self.replace_inline_images(string, prefix = SecureRandom.uuid)
    fqdn = Setting.get('fqdn')
    attachments_inline = []
    filename_counter = 0
    scrubber = Loofah::Scrubber.new do |node|
      if node.name == 'img'
        if node['src'] && node['src'] =~ %r{^(data:image/(jpeg|png);base64,.+?)$}i
          filename_counter += 1
          file_attributes = StaticAssets.data_url_attributes($1)
          cid = "#{prefix}.#{SecureRandom.uuid}@#{fqdn}"
          filename = cid
          if file_attributes[:file_extention].present?
            filename = "image#{filename_counter}.#{file_attributes[:file_extention]}"
          end
          attachment = {
            data:        file_attributes[:content],
            filename:    filename,
            preferences: {
              'Content-Type'        => file_attributes[:mime_type],
              'Mime-Type'           => file_attributes[:mime_type],
              'Content-ID'          => cid,
              'Content-Disposition' => 'inline',
            },
          }
          attachments_inline.push attachment
          node['src'] = "cid:#{cid}"
        end
        Loofah::Scrubber::STOP
      end
    end
    [Loofah.fragment(string).scrub!(scrubber).to_s, attachments_inline]
  end
=begin

sanitize style of img tags

  string = HtmlSanitizer.dynamic_image_size(article.body)

=end

  # Rewrite each <img> style so images scale responsively: prepend
  # 'max-width:100%;' and turn any fixed 'height' into 'max-height'.
  def self.dynamic_image_size(string)
    scrubber = Loofah::Scrubber.new do |node|
      if node.name == 'img'
        if node['src']
          style = 'max-width:100%;'
          if node['style']
            pears = node['style'].downcase.gsub(%r{\t|\n|\r}, '').split(';')
            pears.each do |local_pear|
              prop = local_pear.split(':')
              next if !prop[0]

              key = prop[0].strip
              if key == 'height'
                key = 'max-height'
              end
              style += "#{key}:#{prop[1]};"
            end
          end
          node['style'] = style
        end
        Loofah::Scrubber::STOP
      end
    end
    Loofah.fragment(string).scrub!(scrubber).to_s
  end
2017-03-10 06:49:01 +00:00
private_class_method :cleanup_target
2018-06-07 10:38:09 +00:00
private_class_method :sanitize_attachment_disposition
2017-03-10 06:49:01 +00:00
private_class_method :add_link
private_class_method :url_same?
2017-03-22 06:55:33 +00:00
private_class_method :html_decode
2017-02-02 18:49:34 +00:00
2017-02-01 11:48:50 +00:00
end