Applied rubocop cop 'Style/SpaceInsideStringInterpolation'.
parent f259dd3459
commit 54fd62487c
29 changed files with 74 additions and 74 deletions
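For reference, Style/SpaceInsideStringInterpolation (with RuboCop's default EnforcedStyle of no_space) flags padding spaces just inside the #{ } braces of string interpolation and autocorrects them away; that single mechanical change is what repeats in every hunk below. A minimal Ruby sketch of the rule, using a hypothetical user_id variable:

  user_id = 42
  # Offense under no_space: padding inside the interpolation braces
  Rails.logger.info "client(#{ user_id }) connected"
  # Autocorrected form (e.g. via `rubocop -a`): no padding
  Rails.logger.info "client(#{user_id}) connected"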
@@ -38,7 +38,7 @@ class LongPollingController < ApplicationController
 spool = Sessions.spool_list( params['data']['timestamp'], current_user.id )
 spool.each { |item|
 if item[:type] == 'direct'
-log "send spool to (user_id=#{ current_user.id })", client_id
+log "send spool to (user_id=#{current_user.id})", client_id
 Sessions.send( client_id, item[:message] )
 else
 log 'send spool', client_id
@@ -159,6 +159,6 @@ class LongPollingController < ApplicationController
 end

 def log( data, client_id = '-' )
-logger.info "client(#{ client_id }) #{ data }"
+logger.info "client(#{client_id}) #{data}"
 end
 end

@@ -21,7 +21,7 @@ curl http://localhost/api/v1/rss_fetch.json -v -u #{login}:#{password} -H "Conte
 def fetch
 items = Rss.fetch(params[:url], params[:limit])
 if items.nil?
-render json: { message: "failed to fetch #{ params[:url] }", status: :unprocessable_entity }
+render json: { message: "failed to fetch #{params[:url]}", status: :unprocessable_entity }
 return
 end
 render json: { items: items }

@@ -44,7 +44,7 @@ class TicketOverviewsController < ApplicationController
 array: true,
 )
 if !overview
-render json: { error: "No such view #{ params[:view] }!" }, status: :unprocessable_entity
+render json: { error: "No such view #{params[:view]}!" }, status: :unprocessable_entity
 return
 end

@@ -524,7 +524,7 @@ class OwnModel < ApplicationModel

 # return if we run import mode
 return if Setting.get('import_mode')
-logger.debug "#{ self.class.name }.find(#{ id }) notify created " + created_at.to_s
+logger.debug "#{self.class.name}.find(#{id}) notify created " + created_at.to_s
 class_name = self.class.name
 class_name.gsub!(/::/, '')
 Sessions.broadcast(
@@ -553,7 +553,7 @@ class OwnModel < ApplicationModel

 # return if we run import mode
 return if Setting.get('import_mode')
-logger.debug "#{ self.class.name }.find(#{ id }) notify UPDATED " + updated_at.to_s
+logger.debug "#{self.class.name}.find(#{id}) notify UPDATED " + updated_at.to_s
 class_name = self.class.name
 class_name.gsub!(/::/, '')
 Sessions.broadcast(
@@ -582,7 +582,7 @@ class OwnModel < ApplicationModel

 # return if we run import mode
 return if Setting.get('import_mode')
-logger.debug "#{ self.class.name }.find(#{ id }) notify TOUCH " + updated_at.to_s
+logger.debug "#{self.class.name}.find(#{id}) notify TOUCH " + updated_at.to_s
 class_name = self.class.name
 class_name.gsub!(/::/, '')
 Sessions.broadcast(
@@ -610,7 +610,7 @@ class OwnModel < ApplicationModel

 # return if we run import mode
 return if Setting.get('import_mode')
-logger.debug "#{ self.class.name }.find(#{ id }) notify DESTOY " + updated_at.to_s
+logger.debug "#{self.class.name}.find(#{id}) notify DESTOY " + updated_at.to_s
 class_name = self.class.name
 class_name.gsub!(/::/, '')
 Sessions.broadcast(

@@ -288,7 +288,7 @@ return all avatars of an user
 data = avatar.attributes
 if avatar.store_resize_id
 file = Store.find(avatar.store_resize_id)
-data['content'] = "data:#{ file.preferences['Mime-Type'] };base64,#{ Base64.strict_encode64( file.content ) }"
+data['content'] = "data:#{file.preferences['Mime-Type']};base64,#{Base64.strict_encode64( file.content )}"
 end
 avatar_list.push data
 end

@@ -19,10 +19,10 @@ module Channel::Filter::Database
 scan = mail[ key.downcase.to_sym ].scan(/#{value}/i)
 end
 if match && scan[0]
-Rails.logger.info " matching #{ key.downcase }:'#{ mail[ key.downcase.to_sym ] }' on #{value}"
+Rails.logger.info " matching #{key.downcase}:'#{mail[ key.downcase.to_sym ]}' on #{value}"
 match = true
 else
-Rails.logger.info " is not matching #{ key.downcase }:'#{ mail[ key.downcase.to_sym ] }' on #{value}"
+Rails.logger.info " is not matching #{key.downcase}:'#{mail[ key.downcase.to_sym ]}' on #{value}"
 match = false
 end
 rescue => e
@@ -36,7 +36,7 @@ module Channel::Filter::Database
 next if !match

 filter[:perform].each {|key, value|
-Rails.logger.info " perform '#{ key.downcase }' = '#{value}'"
+Rails.logger.info " perform '#{key.downcase}' = '#{value}'"
 mail[ key.downcase.to_sym ] = value
 }
 }

@@ -22,7 +22,7 @@ class Scheduler < ApplicationModel
 begin
 ActiveRecord::Base.connection.reconnect!
 rescue => e
-logger.error "Can't reconnect to database #{ e.inspect }"
+logger.error "Can't reconnect to database #{e.inspect}"
 end

 # read/load jobs and check if it is alredy started
@@ -89,13 +89,13 @@ class Scheduler < ApplicationModel
 logger.info "execute #{job.method} (try_count #{try_count})..."
 eval job.method() # rubocop:disable Lint/Eval
 rescue => e
-logger.error "execute #{job.method} (try_count #{try_count}) exited with error #{ e.inspect }"
+logger.error "execute #{job.method} (try_count #{try_count}) exited with error #{e.inspect}"

 # reconnect in case db connection is lost
 begin
 ActiveRecord::Base.connection.reconnect!
 rescue => e
-logger.error "Can't reconnect to database #{ e.inspect }"
+logger.error "Can't reconnect to database #{e.inspect}"
 end

 try_run_max = 10

@@ -142,7 +142,7 @@ returns
 def content
 file = Store::File.find_by( id: store_file_id )
 if !file
-fail "No such file #{ store_file_id }!"
+fail "No such file #{store_file_id}!"
 end
 file.content
 end
@@ -150,7 +150,7 @@ returns
 def provider
 file = Store::File.find_by( id: store_file_id )
 if !file
-fail "No such file #{ store_file_id }!"
+fail "No such file #{store_file_id}!"
 end
 file.provider
 end

@@ -17,7 +17,7 @@ class Store
 if !adapter_name
 fail 'Missing storage_provider setting option'
 end
-adapter = load_adapter( "Store::Provider::#{ adapter_name }" )
+adapter = load_adapter( "Store::Provider::#{adapter_name}" )
 adapter.add( data, sha )
 file = Store::File.create(
 provider: adapter_name,
@@ -29,7 +29,7 @@ class Store

 # read content
 def content
-adapter = self.class.load_adapter("Store::Provider::#{ provider }")
+adapter = self.class.load_adapter("Store::Provider::#{provider}")
 if sha
 c = adapter.get( sha )
 else
@@ -62,8 +62,8 @@ class Store
 # e. g. Store::File.move('File', 'DB')
 # e. g. Store::File.move('DB', 'File')
 def self.move(source, target)
-adapter_source = load_adapter("Store::Provider::#{ source }")
-adapter_target = load_adapter("Store::Provider::#{ target }")
+adapter_source = load_adapter("Store::Provider::#{source}")
+adapter_target = load_adapter("Store::Provider::#{target}")

 Store::File.all.each {|item|
 next if item.provider == target
@@ -86,7 +86,7 @@ class Store
 private

 def destroy_provider
-adapter = self.class.load_adapter("Store::Provider::#{ provider }")
+adapter = self.class.load_adapter("Store::Provider::#{provider}")
 adapter.delete( sha )
 end
 end

@@ -9,7 +9,7 @@ class Store::Provider::File
 # install file
 permission = '600'
 if !File.exist?( get_locaton(sha) )
-Rails.logger.debug "storge write '#{ get_locaton(sha) }' (#{permission})"
+Rails.logger.debug "storge write '#{get_locaton(sha)}' (#{permission})"
 file = File.new( get_locaton(sha), 'wb' )
 file.write( data )
 file.close
@@ -19,7 +19,7 @@ class Store::Provider::File
 # check sha
 local_sha = Digest::SHA256.hexdigest( get(sha) )
 if sha != local_sha
-fail "ERROR: Corrupt file in fs #{ get_locaton(sha) }, sha should be #{sha} but is #{local_sha}"
+fail "ERROR: Corrupt file in fs #{get_locaton(sha)}, sha should be #{sha} but is #{local_sha}"
 end

 true
@@ -27,9 +27,9 @@ class Store::Provider::File

 # read file from fs
 def self.get(sha)
-Rails.logger.debug "read from fs #{ get_locaton(sha) }"
+Rails.logger.debug "read from fs #{get_locaton(sha)}"
 if !File.exist?( get_locaton(sha) )
-fail "ERROR: No such file #{ get_locaton(sha) }"
+fail "ERROR: No such file #{get_locaton(sha)}"
 end
 data = File.open( get_locaton(sha), 'rb' )
 content = data.read
@@ -37,7 +37,7 @@ class Store::Provider::File
 # check sha
 local_sha = Digest::SHA256.hexdigest( content )
 if local_sha != sha
-fail "ERROR: Corrupt file in fs #{ get_locaton(sha) }, sha should be #{sha} but is #{local_sha}"
+fail "ERROR: Corrupt file in fs #{get_locaton(sha)}, sha should be #{sha} but is #{local_sha}"
 end
 content
 end
@@ -45,7 +45,7 @@ class Store::Provider::File
 # unlink file from fs
 def self.delete(sha)
 if File.exist?( get_locaton(sha) )
-Rails.logger.info "storge remove '#{ get_locaton(sha) }'"
+Rails.logger.info "storge remove '#{get_locaton(sha)}'"
 File.delete( get_locaton(sha) )
 end
 end

@@ -88,7 +88,7 @@ returns
 }

 if data[:view] && !overview_selected
-fail "No such view '#{ data[:view] }'"
+fail "No such view '#{data[:view]}'"
 end

 # sortby

@@ -17,7 +17,7 @@ returns
 =end

 def search_index_data
-attributes = { 'fullname' => "#{ self['firstname'] } #{ self['lastname'] }" }
+attributes = { 'fullname' => "#{self['firstname']} #{self['lastname']}" }
 %w(login firstname lastname phone email address city country note created_at).each { |key|
 if self[key] && (!self.respond_to?('empty?') || !self[key].empty?)
 attributes[key] = self[key]

@@ -57,7 +57,7 @@ class String
 return if ActiveRecord::Base.connection_config[:adapter] != 'mysql2'
 each_char.select {|c|
 if c.bytes.count > 3
-Rails.logger.warn "strip out 4 bytes utf8 chars '#{c}' of '#{ self }'"
+Rails.logger.warn "strip out 4 bytes utf8 chars '#{c}' of '#{self}'"
 next
 end
 c

@@ -295,7 +295,7 @@ returns

 def self.send( client_id, data )
 path = "#{@path}/#{client_id}/"
-filename = "send-#{ Time.now.utc.to_f }"
+filename = "send-#{Time.now.utc.to_f}"
 check = true
 count = 0
 while check
@@ -458,7 +458,7 @@ returns
 begin
 message_parsed = JSON.parse( spool['msg'] )
 rescue => e
-log('error', "can't parse spool message: #{ message }, #{ e.inspect }")
+log('error', "can't parse spool message: #{message}, #{e.inspect}")
 next
 end

@@ -579,13 +579,13 @@ returns
 begin
 Sessions::Client.new(client_id)
 rescue => e
-log('error', "thread_client #{client_id} exited with error #{ e.inspect }")
+log('error', "thread_client #{client_id} exited with error #{e.inspect}")
 log('error', e.backtrace.join("\n ") )
 sleep 10
 begin
 ActiveRecord::Base.connection_pool.release_connection
 rescue => e
-log('error', "Can't reconnect to database #{ e.inspect }")
+log('error', "Can't reconnect to database #{e.inspect}")
 end

 try_run_max = 10

@@ -29,7 +29,7 @@ class Sessions::Backend::ActivityStream
 end

 def client_key
-"as::load::#{ self.class }::#{ @user.id }::#{ @client_id }"
+"as::load::#{self.class}::#{@user.id}::#{@client_id}"
 end

 def push
@@ -53,7 +53,7 @@ class Sessions::Backend::ActivityStream
 }
 end

-@client.log "push activity_stream #{ data.first.class } for user #{ @user.id }"
+@client.log "push activity_stream #{data.first.class} for user #{@user.id}"
 @client.send(
 event: 'activity_stream_rebuild',
 collection: 'activity_stream',

@@ -16,7 +16,7 @@ class Sessions::Backend::Collections::Base
 end

 def client_key
-"collections::load::#{ self.class }::#{ @user.id }::#{ @client_id }"
+"collections::load::#{self.class}::#{@user.id}::#{@client_id}"
 end

 def push
@@ -77,13 +77,13 @@ class Sessions::Backend::Collections::Base
 assets: assets,
 }
 end
-@client.log "push assets for push_collection #{ items.first.class } for user #{ @user.id }"
+@client.log "push assets for push_collection #{items.first.class} for user #{@user.id}"
 @client.send(
 data: assets,
 event: [ 'loadAssets' ],
 )

-@client.log "push push_collection #{ items.first.class } for user #{ @user.id }"
+@client.log "push push_collection #{items.first.class} for user #{@user.id}"
 @client.send(
 event: 'resetCollection',
 data: {

@@ -10,7 +10,7 @@ class Sessions::Backend::Rss
 end

 def collection_key
-"rss::load::#{ self.class }::#{ @user.id }"
+"rss::load::#{self.class}::#{@user.id}"
 end

 def load
@@ -29,7 +29,7 @@ class Sessions::Backend::Rss
 end

 def client_key
-"rss::load::#{ self.class }::#{ @user.id }::#{ @client_id }"
+"rss::load::#{self.class}::#{@user.id}::#{@client_id}"
 end

 def push

@@ -27,7 +27,7 @@ class Sessions::Backend::TicketCreate
 end

 def client_key
-"as::load::#{ self.class }::#{ @user.id }::#{ @client_id }"
+"as::load::#{self.class}::#{@user.id}::#{@client_id}"
 end

 def push
@@ -58,7 +58,7 @@ class Sessions::Backend::TicketCreate
 }
 end

-@client.log "push ticket_create for user #{ @user.id }"
+@client.log "push ticket_create for user #{@user.id}"
 @client.send(
 collection: 'ticket_create_attributes',
 data: data,

@@ -28,7 +28,7 @@ class Sessions::Backend::TicketOverviewIndex
 end

 def client_key
-"as::load::#{ self.class }::#{ @user.id }::#{ @client_id }"
+"as::load::#{self.class}::#{@user.id}::#{@client_id}"
 end

 def push
@@ -56,7 +56,7 @@ class Sessions::Backend::TicketOverviewIndex
 }
 end

-@client.log "push overview_index for user #{ @user.id }"
+@client.log "push overview_index for user #{@user.id}"
 @client.send(
 event: ['ticket_overview_index'],
 data: data,

@@ -38,7 +38,7 @@ class Sessions::Backend::TicketOverviewList
 end

 def client_key
-"as::load::#{ self.class }::#{ @user.id }::#{ @client_id }"
+"as::load::#{self.class}::#{@user.id}::#{@client_id}"
 end

 def push
@@ -102,7 +102,7 @@ class Sessions::Backend::TicketOverviewList
 results.push result
 else

-@client.log "push overview_list for user #{ @user.id }"
+@client.log "push overview_list for user #{@user.id}"

 # send update to browser
 @client.send(

@@ -71,6 +71,6 @@ class Sessions::Client
 end

 def log( msg )
-Rails.logger.debug "client(#{ @client_id }) #{ msg }"
+Rails.logger.debug "client(#{@client_id}) #{msg}"
 end
 end

@@ -59,7 +59,7 @@ if ARGV[0] != 'start' && ARGV[0] != 'stop'
 exit
 end

-puts "Starting websocket server on #{ @options[:b] }:#{ @options[:p] } (secure:#{ @options[:s] },pid:#{@options[:i]})"
+puts "Starting websocket server on #{@options[:b]}:#{@options[:p]} (secure:#{@options[:s]},pid:#{@options[:i]})"
 #puts options.inspect

 if ARGV[0] == 'stop'
@@ -119,11 +119,11 @@ EventMachine.run {
 ws.onmessage { |msg|

 client_id = ws.object_id.to_s
-log 'debug', "received: #{ msg } ", client_id
+log 'debug', "received: #{msg} ", client_id
 begin
 data = JSON.parse(msg)
 rescue => e
-log 'error', "can't parse message: #{ msg }, #{ e.inspect }", client_id
+log 'error', "can't parse message: #{msg}, #{e.inspect}", client_id
 next
 end

@@ -151,7 +151,7 @@ EventMachine.run {

 # create new msg to push to client
 if item[:type] == 'direct'
-log 'notice', "send spool to (user_id=#{ @clients[client_id][:session]['id'] })", client_id
+log 'notice', "send spool to (user_id=#{@clients[client_id][:session]['id']})", client_id
 websocket_send(client_id, item[:message])
 else
 log 'notice', 'send spool', client_id
@@ -198,13 +198,13 @@ EventMachine.run {
 # broadcast to recipient list
 if data['recipient']
 if data['recipient'].class != Hash
-log 'error', "recipient attribute isn't a hash '#{ data['recipient'].inspect }'"
+log 'error', "recipient attribute isn't a hash '#{data['recipient'].inspect}'"
 else
 if !data['recipient'].key?('user_id')
-log 'error', "need recipient.user_id attribute '#{ data['recipient'].inspect }'"
+log 'error', "need recipient.user_id attribute '#{data['recipient'].inspect}'"
 else
 if data['recipient']['user_id'].class != Array
-log 'error', "recipient.user_id attribute isn't an array '#{ data['recipient']['user_id'].inspect }'"
+log 'error', "recipient.user_id attribute isn't an array '#{data['recipient']['user_id'].inspect}'"
 else
 data['recipient']['user_id'].each { |user_id|

@@ -251,7 +251,7 @@ EventMachine.run {
 EventMachine.add_periodic_timer(20) {

 # websocket
-log 'notice', "Status: websocket clients: #{ @clients.size }"
+log 'notice', "Status: websocket clients: #{@clients.size}"
 @clients.each { |client_id, _client|
 log 'notice', 'working...', client_id
 }
@@ -263,7 +263,7 @@ EventMachine.run {
 next if client[:meta][:type] == 'websocket'
 clients = clients + 1
 }
-log 'notice', "Status: ajax clients: #{ clients }"
+log 'notice', "Status: ajax clients: #{clients}"
 client_list.each {|client_id, client|
 next if client[:meta][:type] == 'websocket'
 log 'notice', 'working...', client_id
@@ -346,7 +346,7 @@ EventMachine.run {
 if !@options[:v]
 return if level == 'debug'
 end
-puts "#{Time.now.utc.iso8601}:client(#{ client_id }) #{ data }"
+puts "#{Time.now.utc.iso8601}:client(#{client_id}) #{data}"
 #puts "#{Time.now.utc.iso8601}:#{ level }:client(#{ client_id }) #{ data }"
 end

@@ -6,7 +6,7 @@ class AgentUserManageTest < TestCase
 customer_user_email = 'customer-test-' + rand(999_999).to_s + '@example.com'
 firstname = 'Customer Firstname'
 lastname = 'Customer Lastname'
-fullname = "#{ firstname } #{ lastname } <#{ customer_user_email }>"
+fullname = "#{firstname} #{lastname} <#{customer_user_email}>"

 @browser = browser_instance
 login(

@@ -1095,7 +1095,7 @@ wait untill text in selector disabppears
 sleep 1
 }
 screenshot( browser: instance, comment: 'ticket_create_failed' )
-fail "ticket creation failed, can't get zoom url (current url is '#{ instance.current_url }')"
+fail "ticket creation failed, can't get zoom url (current url is '#{instance.current_url}')"
 end

 =begin

@@ -385,7 +385,7 @@ class ActivityStreamTest < ActiveSupport::TestCase
 assert_equal( check_item[:o_id], item['o_id'] )
 else
 if check_item[:object] == item['object'] && check_item[:type] == item['type'] && check_item[:o_id] == item['o_id']
-assert( false, "entry should not exist #{ item['object'] }/#{ item['type'] }/#{ item['o_id'] }" )
+assert( false, "entry should not exist #{item['object']}/#{item['type']}/#{item['o_id']}" )
 end
 end
 }

@@ -337,7 +337,7 @@ class OnlineNotificationTest < ActiveSupport::TestCase
 end
 }
 #puts "--- #{onine_notifications.inspect}"
-assert( hit, "online notification exists not #{ check_item.inspect }" )
+assert( hit, "online notification exists not #{check_item.inspect}" )
 }
 end

@@ -49,7 +49,7 @@ class StoreTest < ActiveSupport::TestCase

 # sha check
 sha_new = Digest::SHA256.hexdigest( attachments[0].content )
-assert_equal( sha, sha_new, "check file #{ file[:filename] }")
+assert_equal( sha, sha_new, "check file #{file[:filename]}")

 # filename check
 assert_equal( file[:filename], attachments[0].filename )
@@ -75,7 +75,7 @@ class StoreTest < ActiveSupport::TestCase

 # sha check
 sha_new = Digest::SHA256.hexdigest( attachments[0].content )
-assert_equal( sha, sha_new, "check file #{ file[:filename] }")
+assert_equal( sha, sha_new, "check file #{file[:filename]}")

 # filename check
 assert_equal( file[:filename], attachments[0].filename )
@@ -101,7 +101,7 @@ class StoreTest < ActiveSupport::TestCase

 # sha check
 sha_new = Digest::SHA256.hexdigest( attachments[0].content )
-assert_equal( sha, sha_new, "check file #{ file[:filename] }")
+assert_equal( sha, sha_new, "check file #{file[:filename]}")

 # filename check
 assert_equal( file[:filename], attachments[0].filename )

@@ -106,9 +106,9 @@ class TagTest < ActiveSupport::TestCase
 list = Tag.tag_list( tags )
 test[:verify][:items].each {|key, value|
 if value == true
-assert( list.include?( key ), "Tag verify - should exists but exists #{ key }")
+assert( list.include?( key ), "Tag verify - should exists but exists #{key}")
 else
-assert( !list.include?( key ), "Tag verify - exists but should not #{ key }")
+assert( !list.include?( key ), "Tag verify - exists but should not #{key}")
 end
 }
 }

@@ -255,15 +255,15 @@ class UserTest < ActiveSupport::TestCase
 test[:create_verify].each { |key, value|
 next if key == :image_md5
 if user.respond_to?( key )
-assert_equal( value, user.send(key), "create check #{ key } in (#{ test[:name] })" )
+assert_equal( value, user.send(key), "create check #{key} in (#{test[:name]})" )
 else
-assert_equal( value, user[key], "create check #{ key } in (#{ test[:name] })" )
+assert_equal( value, user[key], "create check #{key} in (#{test[:name]})" )
 end
 }
 if test[:create_verify][:image_md5]
 file = Avatar.get_by_hash( user.image )
 file_md5 = Digest::MD5.hexdigest( file.content )
-assert_equal( test[:create_verify][:image_md5], file_md5, "create avatar md5 check in (#{ test[:name] })" )
+assert_equal( test[:create_verify][:image_md5], file_md5, "create avatar md5 check in (#{test[:name]})" )
 end
 if test[:update]
 user.update_attributes( test[:update] )
@@ -271,16 +271,16 @@ class UserTest < ActiveSupport::TestCase
 test[:update_verify].each { |key, value|
 next if key == :image_md5
 if user.respond_to?( key )
-assert_equal( value, user.send(key), "update check #{ key } in (#{ test[:name] })" )
+assert_equal( value, user.send(key), "update check #{key} in (#{test[:name]})" )
 else
-assert_equal( value, user[key], "update check #{ key } in (#{ test[:name] })" )
+assert_equal( value, user[key], "update check #{key} in (#{test[:name]})" )
 end
 }

 if test[:update_verify][:image_md5]
 file = Avatar.get_by_hash( user.image )
 file_md5 = Digest::MD5.hexdigest( file.content )
-assert_equal( test[:update_verify][:image_md5], file_md5, "update avatar md5 check in (#{ test[:name] })" )
+assert_equal( test[:update_verify][:image_md5], file_md5, "update avatar md5 check in (#{test[:name]})" )
 end
 end