2017-02-01 11:48:50 +00:00
class HtmlSanitizer
2018-08-01 09:25:38 +00:00
# Schemes eligible for auto-linking plain-text URLs: everything the URI
# library registers, minus mailto (handled separately), plus tel.
LINKABLE_URL_SCHEMES = URI.scheme_list.keys.map(&:downcase) - ['mailto'] + ['tel']

# Hard per-call processing limit (seconds) for each sanitizer pass.
PROCESSING_TIMEOUT = 20

# Fallback text returned when sanitizing exceeds PROCESSING_TIMEOUT.
UNPROCESSABLE_HTML_MSG = 'This message cannot be displayed due to HTML processing issues. Download the raw message below and open it via an Email client if you still wish to view it.'.freeze
2017-02-01 11:48:50 +00:00
2017-03-10 06:49:01 +00:00
=begin

sanitize html string based on whitelist

  string = HtmlSanitizer.strict(string, external)

=end

def self.strict(string, external = false, timeout: true)
  Timeout.timeout(timeout ? PROCESSING_TIMEOUT : nil) do
    @fqdn = Setting.get('fqdn')

    # whitelists / blacklists come from the application configuration
    tags_remove_content      = Rails.configuration.html_sanitizer_tags_remove_content
    tags_quote_content       = Rails.configuration.html_sanitizer_tags_quote_content
    tags_whitelist           = Rails.configuration.html_sanitizer_tags_whitelist
    attributes_whitelist     = Rails.configuration.html_sanitizer_attributes_whitelist
    css_properties_whitelist = Rails.configuration.html_sanitizer_css_properties_whitelist
    css_values_blacklist     = Rails.application.config.html_sanitizer_css_values_backlist

    # We whitelist yahoo_quoted because Yahoo Mail marks quoted email content using
    # <div class='yahoo_quoted'> and we rely on this class to identify quoted messages
    classes_whitelist = ['js-signatureMarker', 'yahoo_quoted']
    attributes_2_css  = %w[width height]

    # strip html comments up front
    string.gsub!(/<!--.+?-->/m, '')

    scrubber_link = Loofah::Scrubber.new do |node|
      # wrap plain-text URLs in <a> tags (skip text already inside a link)
      if node.is_a?(Nokogiri::XML::Text) && node.content.present? && node.content.include?(':') && node.ancestors.map(&:name).exclude?('a')
        urls = URI.extract(node.content, LINKABLE_URL_SCHEMES)
                  .map { |u| u.sub(/[,.]$/, '') } # URI::extract captures trailing dots/commas
                  .reject { |u| u.match?(/^[^:]+:$/) } # URI::extract will match, e.g., 'tel:'

        next if urls.blank?

        add_link(node.content, urls, node)
      end

      # prepare links
      if node['href']
        href                = cleanup_target(node['href'], keep_spaces: true)
        href_without_spaces = href.gsub(/[[:space:]]/, '')

        # external content without a scheme gets http:// prefixed
        if external && href_without_spaces.present? && !href_without_spaces.downcase.start_with?('//') && href_without_spaces.downcase !~ %r{^.{1,6}://.+?}
          node['href']        = "http://#{node['href']}"
          href                = node['href']
          href_without_spaces = href.gsub(/[[:space:]]/, '')
        end

        next if !href_without_spaces.downcase.start_with?('http', 'ftp', '//')

        node.set_attribute('href', href)
        node.set_attribute('rel', 'nofollow noreferrer noopener')
        node.set_attribute('target', '_blank')
      end

      # drop anchor tags without a target, keep their children
      if node.name == 'a' && node['href'].blank?
        node.replace node.children.to_s
        Loofah::Scrubber::STOP
      end

      # expose the real target via title when the link text differs from href
      if node.name == 'a' && !url_same?(node['href'], node.text) && node['title'].blank?
        node['title'] = node['href']
      end
    end

    scrubber_wipe = Loofah::Scrubber.new do |node|
      # remove tags with subtree
      if tags_remove_content.include?(node.name)
        node.remove
        Loofah::Scrubber::STOP
      end

      # remove tag, insert quoted content
      if tags_quote_content.include?(node.name)
        # NOTE(review): this deliberately reassigns the outer `string` local;
        # renaming it would change the fix-point comparison in the loop below.
        string = html_decode(node.content)
        text = Nokogiri::XML::Text.new(string, node.document)
        node.add_next_sibling(text)
        node.remove
        Loofah::Scrubber::STOP
      end

      # replace non-whitelisted tags, keep subtree
      if !tags_whitelist.include?(node.name)
        node.replace node.children.to_s
        Loofah::Scrubber::STOP
      end

      # drop nodes with scripting or remote src targets
      if node['src']
        src = cleanup_target(node['src'])
        if src =~ /(javascript|livescript|vbscript):/i || src.downcase.start_with?('http', 'ftp', '//')
          node.remove
          Loofah::Scrubber::STOP
        end
      end

      # clean class / only keep allowed classes
      if node['class']
        kept_classes = node['class'].gsub(/\t|\n|\r/, '')
                                    .split(' ')
                                    .select { |local_class| classes_whitelist.include?(local_class.to_s.strip) }
        if kept_classes.any?
          node['class'] = kept_classes.join(' ')
        else
          node.delete('class')
        end
      end

      # move width/height attributes into the style attribute
      attributes_2_css.each do |key|
        next if !node[key]

        if node['style'].blank?
          node['style'] = ''
        else
          node['style'] += ';'
        end
        value = node[key]
        node.delete(key)
        next if value.blank?

        # bare numbers are treated as pixel values
        value += 'px' if !value.match?(/%|px|em/i)
        node['style'] += "#{key}:#{value}"
      end

      # clean style / only keep allowed style properties
      if node['style']
        declarations = node['style'].downcase.gsub(/\t|\n|\r/, '').split(';')
        style = ''
        declarations.each do |declaration|
          prop = declaration.split(':')
          next if !prop[0]

          key = prop[0].strip
          next if !css_properties_whitelist.include?(node.name)
          next if !css_properties_whitelist[node.name].include?(key)
          next if css_values_blacklist[node.name]&.include?(declaration.gsub(/[[:space:]]/, '').strip)

          style += "#{declaration};"
        end
        node['style'] = style
        if style == ''
          node.delete('style')
        end
      end

      # scan href/style for scripting payloads
      %w[href style].each do |attribute_name|
        next if !node[attribute_name]

        href = cleanup_target(node[attribute_name])
        next if href !~ /(javascript|livescript|vbscript):/i

        node.delete(attribute_name)
      end

      # remove attributes that are not whitelisted
      node.each do |attribute, _value|
        attribute_name = attribute.downcase
        next if attributes_whitelist[:all].include?(attribute_name) || attributes_whitelist[node.name]&.include?(attribute_name)

        node.delete(attribute)
      end

      # turn mailto links into plain text
      if node['href']
        href = cleanup_target(node['href'])
        if href =~ /mailto:(.*)$/i
          text = Nokogiri::XML::Text.new($1, node.document)
          node.add_next_sibling(text)
          node.remove
          Loofah::Scrubber::STOP
        end
      end
    end

    # run the wipe scrubber until the markup reaches a fix-point
    loop do
      new_string = Loofah.fragment(string).scrub!(scrubber_wipe).to_s
      stable     = string == new_string
      string     = new_string
      break if stable
    end

    Loofah.fragment(string).scrub!(scrubber_link).to_s
  end
rescue Timeout::Error
  Rails.logger.error "Could not process string via HtmlSanitizer.strict in #{PROCESSING_TIMEOUT} seconds. Current state: #{string}"
  UNPROCESSABLE_HTML_MSG
end
2017-03-10 06:49:01 +00:00
=begin

cleanup html string:

  * remove empty nodes (p, div, span, table)
  * remove nodes in general (keep content - span)

  string = HtmlSanitizer.cleanup(string)

=end

def self.cleanup(string, timeout: true)
  Timeout.timeout(timeout ? PROCESSING_TIMEOUT : nil) do
    # strip office namespace tags like <o:p> / </o:p>
    string.gsub!(/<[A-z]:[A-z]>/, '')
    string.gsub!(%r{</[A-z]:[A-z]>}, '')
    string.delete!("\t")

    # normalize all line endings to \n
    string.gsub!(/(\n\r|\r\r\n|\r\n|\n)/, "\n")

    # collapse three or more consecutive newlines into two
    string.gsub!(/\n\n\n+/, "\n\n")

    string = cleanup_structure(string, 'pre')
    string = cleanup_replace_tags(string)
    cleanup_structure(string)
  end
rescue Timeout::Error
  Rails.logger.error "Could not process string via HtmlSanitizer.cleanup in #{PROCESSING_TIMEOUT} seconds. Current state: #{string}"
  UNPROCESSABLE_HTML_MSG
end
# Unwrap blacklisted wrapper tags (span, center), keeping their children.
# Tags carrying attributes inside a table cell are left alone, since those
# attributes may hold layout information.
def self.cleanup_replace_tags(string)
  tags_backlist = %w[span center]
  scrubber = Loofah::Scrubber.new do |node|
    next if !tags_backlist.include?(node.name)

    # look at up to 5 ancestors for an enclosing table cell
    inside_td = false
    ancestor  = node.parent
    5.times do
      break if !ancestor

      inside_td = true if ancestor.name == 'td'
      ancestor  = ancestor.parent
    end
    next if inside_td && node.keys.count.positive?

    # unwrap the node and recurse into its serialized children
    node.replace cleanup_replace_tags(node.children.to_s)
    Loofah::Scrubber::STOP
  end
  Loofah.fragment(string).scrub!(scrubber).to_s
end
2017-04-28 10:16:15 +00:00
# Remove redundant / empty structural nodes and normalize whitespace in
# text nodes. type 'pre' restricts the removable set to span only.
def self.cleanup_structure(string, type = 'all')
  remove_empty_nodes = if type == 'pre'
                         %w[span]
                       else
                         %w[p div span small table]
                       end
  remove_empty_last_nodes = %w[b i u small table]

  # remove last empty nodes and empty -not needed- parent nodes
  scrubber_structure = Loofah::Scrubber.new do |node|
    # drop childless formatting/table nodes
    if remove_empty_last_nodes.include?(node.name) && node.children.size.zero?
      node.remove
      Loofah::Scrubber::STOP
    end

    # unwrap an empty node whose only child is also removable
    if node.content.blank? && remove_empty_nodes.include?(node.name) && node.children.size == 1 && remove_empty_nodes.include?(node.children.first.name)
      node.replace node.children.to_s
      Loofah::Scrubber::STOP
    end

    # unwrap a node whose single removable child carries identical content
    if remove_empty_nodes.include?(node.name) && node.children.size == 1 && remove_empty_nodes.include?(node.children.first.name) && node.children.first.content == node.content
      node.replace node.children.to_s
      Loofah::Scrubber::STOP
    end

    # remove node if empty and parent was already a removable node
    if node.content.blank? && remove_empty_nodes.include?(node.name) && node.parent && node.children.size.zero? && remove_empty_nodes.include?(node.parent.name)
      node.remove
      Loofah::Scrubber::STOP
    end
  end

  # re-run until the markup no longer changes (fix-point)
  loop do
    new_string = Loofah.fragment(string).scrub!(scrubber_structure).to_s
    break if string == new_string

    string = new_string
  end

  scrubber_cleanup = Loofah::Scrubber.new do |node|
    # turn mailto links into plain text
    if node['href']
      href = cleanup_target(node['href'])
      if href =~ /mailto:(.*)$/i
        text = Nokogiri::XML::Text.new($1, node.document)
        node.add_next_sibling(text)
        node.remove
        Loofah::Scrubber::STOP
      end
    end

    # collapse superfluous whitespace in text nodes (except inside pre/code)
    if node.instance_of?(Nokogiri::XML::Text)
      if !node.parent || (node.parent.name != 'pre' && node.parent.name != 'code')
        content = node.content
        if content
          if content != ' ' && content != "\n"
            content.gsub!(/[[:space:]]+/, ' ')
          end
          if node.previous
            if node.previous.name == 'div' || node.previous.name == 'p'
              content.strip!
            end
          elsif node.parent && (!node.next || node.next.name == 'div' || node.next.name == 'p' || node.next.name == 'br')
            if (node.parent.name == 'div' || node.parent.name == 'p') && content != ' ' && content != "\n"
              content.strip!
            end
          end
          node.content = content
        end
      end
    end
  end

  Loofah.fragment(string).scrub!(scrubber_cleanup).to_s
end
# Splice <a> elements into `content` for every URL in `urls`, attaching the
# resulting nodes as siblings of `node`; recurses on the text remaining
# after each matched URL.
def self.add_link(content, urls, node)
  # no more URLs: emit the remaining text verbatim
  if urls.blank?
    text = Nokogiri::XML::Text.new(content, node.document)
    node.add_next_sibling(text)
    return
  end

  url = urls.shift
  if content =~ /^(.*)#{Regexp.quote(url)}(.*)$/mx
    pre  = $1
    post = $2

    # bare www hosts get an explicit scheme
    url = "http://#{url}" if url.match?(/^www/i)

    a           = Nokogiri::XML::Node.new 'a', node.document
    a['href']   = url
    a['rel']    = 'nofollow noreferrer noopener'
    a['target'] = '_blank'
    a.content   = url

    if !node.instance_of?(Nokogiri::XML::Text)
      text = Nokogiri::XML::Text.new(pre, node.document)
      node.add_next_sibling(text).add_next_sibling(a)
      return if post.blank?

      add_link(post, urls, a)
      return
    end

    node.content = pre
    node.add_next_sibling(a)
    return if post.blank?

    add_link(post, urls, a)
  end

  true
end
2017-03-22 06:55:33 +00:00
# Decode the basic HTML entities back into literal characters.
# The visible source had identity replacements (gsub('&','&')) because the
# entity names were lost to HTML-unescaping corruption; restore them.
# Order matters: '&amp;' is decoded first so double-encoded entities
# (e.g. '&amp;lt;') decode step by step.
def self.html_decode(string)
  string.gsub('&amp;', '&')
        .gsub('&lt;', '<')
        .gsub('&gt;', '>')
        .gsub('&quot;', '"')
        .gsub('&nbsp;', ' ')
end
2018-06-07 10:38:09 +00:00
# Normalize a link/src target for safety checks: URL-decode, force valid
# UTF-8, optionally strip all whitespace, remove control characters and
# embedded comments, then rewrite attachment URLs on our own FQDN.
def self.cleanup_target(string, **options)
  cleaned = CGI.unescape(string).utf8_encode(fallback: :read_as_sanitized_binary)
  cleaned = cleaned.gsub(/[[:space:]]/, '') if !options[:keep_spaces]
  cleaned = cleaned.strip
                   .delete("\t\n\r\u0000")
                   .gsub(%r{/\*.*?\*/}, '')
                   .gsub(/<!--.*?-->/, '')
  sanitize_attachment_disposition(cleaned)
end
# For URLs pointing at our own FQDN, force an explicit 'attachment'
# disposition query parameter so browsers download rather than render.
def self.sanitize_attachment_disposition(url)
  @fqdn ||= Setting.get('fqdn')
  uri = URI(url)

  if uri.host == @fqdn && uri.query.present?
    params = CGI.parse(uri.query || '')
                .tap { |p| p.merge!('disposition' => 'attachment') if p.include?('disposition') }
    uri.query = URI.encode_www_form(params)
  end

  uri.to_s
rescue
  # unparsable URLs are returned untouched
  url
end
# True when two URLs are equivalent after canonicalization, treating
# http/https prefixes as interchangeable.
def self.url_same?(url_new, url_old)
  # canonicalize: decode, force UTF-8, lowercase, strip trailing slash and whitespace
  normalize = lambda do |url|
    CGI.unescape(url.to_s)
       .utf8_encode(fallback: :read_as_sanitized_binary)
       .downcase
       .gsub(%r{/$}, '')
       .gsub(/[[:space:]]|\t|\n|\r/, '')
       .strip
  end
  url_new = html_decode(normalize.call(url_new)).sub('/?', '?')
  url_old = html_decode(normalize.call(url_old)).sub('/?', '?')

  return true if url_new == url_old
  return true if url_old == "http://#{url_new}"
  return true if url_new == "http://#{url_old}"
  return true if url_old == "https://#{url_new}"
  return true if url_new == "https://#{url_old}"

  false
end
2017-03-17 05:27:50 +00:00
=begin

replace inline images with cid images

  string = HtmlSanitizer.replace_inline_images(article.body)

=end

def self.replace_inline_images(string, prefix = rand(999_999_999))
  fqdn               = Setting.get('fqdn')
  attachments_inline = []
  filename_counter   = 0

  scrubber = Loofah::Scrubber.new do |node|
    if node.name == 'img'
      if node['src'] && node['src'] =~ %r{^(data:image/(jpeg|png);base64,.+?)$}i
        filename_counter += 1
        file_attributes = StaticAssets.data_url_attributes($1)
        cid = "#{prefix}.#{rand(999_999_999)}@#{fqdn}"

        # derive a readable filename when the data URL exposes an extension
        filename = cid
        if file_attributes[:file_extention].present?
          filename = "image#{filename_counter}.#{file_attributes[:file_extention]}"
        end

        attachment = {
          data:        file_attributes[:content],
          filename:    filename,
          preferences: {
            'Content-Type'        => file_attributes[:mime_type],
            'Mime-Type'           => file_attributes[:mime_type],
            'Content-ID'          => cid,
            'Content-Disposition' => 'inline',
          },
        }
        attachments_inline.push attachment
        node['src'] = "cid:#{cid}"
      end
      Loofah::Scrubber::STOP
    end
  end

  [Loofah.fragment(string).scrub!(scrubber).to_s, attachments_inline]
end
=begin

sanitize style of img tags

  string = HtmlSanitizer.dynamic_image_size(article.body)

=end

def self.dynamic_image_size(string)
  scrubber = Loofah::Scrubber.new do |node|
    if node.name == 'img'
      if node['src']
        style = 'max-width:100%;'
        if node['style']
          node['style'].downcase.gsub(/\t|\n|\r/, '').split(';').each do |declaration|
            prop = declaration.split(':')
            next if !prop[0]

            key = prop[0].strip
            # cap fixed heights so images scale with the viewport
            key = 'max-height' if key == 'height'
            style += "#{key}:#{prop[1]};"
          end
        end
        node['style'] = style
      end
      Loofah::Scrubber::STOP
    end
  end
  Loofah.fragment(string).scrub!(scrubber).to_s
end
2017-03-10 06:49:01 +00:00
# internal helpers — not part of the public API
private_class_method :cleanup_target,
                     :sanitize_attachment_disposition,
                     :add_link,
                     :url_same?,
                     :html_decode
2017-02-02 18:49:34 +00:00
2017-02-01 11:48:50 +00:00
end