Moved to rails logger.

This commit is contained in:
Martin Edenhofer 2015-05-04 20:58:28 +02:00
parent e98d98e5ff
commit 4365039f49
34 changed files with 171 additions and 232 deletions

View file

@ -99,15 +99,15 @@ class ApplicationController < ActionController::Base
def authentication_check_only
#puts 'authentication_check'
logger.debug 'authentication_check'
session[:request_type] = 1
#puts params.inspect
#puts session.inspect
#puts cookies.inspect
#logger.debug params.inspect
#logger.debug session.inspect
#logger.debug cookies.inspect
# check http basic auth
authenticate_with_http_basic do |username, password|
#puts 'http basic auth check'
logger.debug 'http basic auth check'
session[:request_type] = 2
userdata = User.authenticate( username, password )
@ -165,7 +165,7 @@ class ApplicationController < ActionController::Base
# return auth not ok (no session exists)
if !session[:user_id]
puts 'no valid session, user_id'
logger.debug 'no valid session, user_id'
message = 'no valid session, user_id'
return {
auth: false,
@ -204,7 +204,7 @@ class ApplicationController < ActionController::Base
)
if !user
puts params.inspect
logger.debug params.inspect
response_access_deny
return
end
@ -295,7 +295,6 @@ class ApplicationController < ActionController::Base
model_create_render_item(generic_object)
rescue Exception => e
puts e.message.inspect
logger.error e.message
logger.error e.backtrace.inspect
render json: { error: e.message }, status: :unprocessable_entity

View file

@ -12,7 +12,7 @@ class LongPollingController < ApplicationController
if !client_id
new_connection = true
client_id = client_id_gen
log 'notice', 'new client connection', client_id
log 'new client connection', client_id
end
if !params['data']
params['data'] = {}
@ -29,19 +29,19 @@ class LongPollingController < ApplicationController
# error handling
if params['data']['timestamp']
log 'notice', "request spool data > '#{Time.at( params['data']['timestamp'] )}'", client_id
log "request spool data > '#{Time.at( params['data']['timestamp'] )}'", client_id
else
log 'notice', 'request spool init data', client_id
log 'request spool init data', client_id
end
if current_user
spool = Sessions.spool_list( params['data']['timestamp'], current_user.id )
spool.each { |item|
if item[:type] == 'direct'
log 'notice', "send spool to (user_id=#{ current_user.id })", client_id
log "send spool to (user_id=#{ current_user.id })", client_id
Sessions.send( client_id, item[:message] )
else
log 'notice', 'send spool', client_id
log 'send spool', client_id
Sessions.send( client_id, item[:message] )
end
}
@ -49,7 +49,7 @@ class LongPollingController < ApplicationController
# send spool:sent event to client
sleep 0.2
log 'notice', 'send spool:sent event', client_id
log 'send spool:sent event', client_id
Sessions.send( client_id, { event: 'spool:sent', data: { timestamp: Time.now.utc.to_i } } )
end
@ -60,7 +60,7 @@ class LongPollingController < ApplicationController
if user_id
user = User.find( user_id ).attributes
end
log 'notice', "send auth login (user_id #{user_id})", client_id
log "send auth login (user_id #{user_id})", client_id
Sessions.create( client_id, user, { type: 'ajax' } )
# broadcast
@ -75,17 +75,17 @@ class LongPollingController < ApplicationController
if params['data']['recipient'] && params['data']['recipient']['user_id']
params['data']['recipient']['user_id'].each { |user_id|
if local_client[:user]['id'].to_s == user_id.to_s
log 'notice', "send broadcast from (#{client_id}) to (user_id #{user_id})", local_client_id
log "send broadcast from (#{client_id}) to (user_id #{user_id})", local_client_id
Sessions.send( local_client_id, params['data'] )
end
}
# broadcast every client
else
log 'notice', "send broadcast from (#{client_id})", local_client_id
log "send broadcast from (#{client_id})", local_client_id
Sessions.send( local_client_id, params['data'] )
end
else
log 'notice', 'do not send broadcast to it self', client_id
log 'do not send broadcast to it self', client_id
end
}
end
@ -124,7 +124,7 @@ class LongPollingController < ApplicationController
count = count - 1
queue = Sessions.queue( client_id )
if queue && queue[0]
# puts "send " + queue.inspect + client_id.to_s
logger.debug "send " + queue.inspect + client_id.to_s
render json: queue
return
end
@ -138,8 +138,8 @@ class LongPollingController < ApplicationController
end
end
rescue Exception => e
puts e.inspect
puts e.backtrace
logger.error e.inspect
logger.error e.backtrace
render json: { error: 'Invalid client_id in receive loop!' }, status: :unprocessable_entity
return
end
@ -158,11 +158,7 @@ class LongPollingController < ApplicationController
params[:client_id].to_s
end
def log( level, data, client_id = '-' )
if false
return if level == 'debug'
end
puts "#{Time.zone.now}:client(#{ client_id }) #{ data }"
# puts "#{Time.now}:#{ level }:client(#{ client_id }) #{ data }"
def log( data, client_id = '-' )
logger.info "client(#{ client_id }) #{ data }"
end
end

View file

@ -81,10 +81,8 @@ returns
# only use object attributes
data = {}
self.new.attributes.each {|item|
if params.key?(item[0])
# puts 'use ' + item[0].to_s + '-' + params[item[0]].to_s
data[item[0].to_sym] = params[item[0]]
end
next if !params.key?(item[0])
data[item[0].to_sym] = params[item[0]]
}
# we do want to set this via database
@ -230,10 +228,12 @@ returns
end
def cache_update(o)
# puts 'u ' + self.class.to_s
if self.respond_to?('cache_delete') then self.cache_delete end
# puts 'g ' + group.class.to_s
if o.respond_to?('cache_delete') then o.cache_delete end
if self.respond_to?('cache_delete')
self.cache_delete
end
if o.respond_to?('cache_delete')
o.cache_delete
end
end
def cache_delete
@ -312,11 +312,9 @@ returns
def self.lookup(data)
if data[:id]
# puts "GET- + #{self.to_s}.#{data[:id].to_s}"
cache = self.cache_get( data[:id] )
return cache if cache
# puts "Fillup- + #{self.to_s}.#{data[:id].to_s}"
record = self.where( id: data[:id] ).first
self.cache_set( data[:id], record )
return record
@ -821,7 +819,7 @@ log object create history, if configured - will be executed automatically
def history_create
return if !self.class.history_support_config
#puts 'create ' + self.changes.inspect
#logger.debug 'create ' + self.changes.inspect
self.history_log( 'created', self.created_by_id )
end
@ -853,7 +851,7 @@ log object update history with all updated attributes, if configured - will be e
}
end
self.history_changes_last_done = changes
#puts 'updated ' + self.changes.inspect
#logger.info 'updated ' + self.changes.inspect
return if changes['id'] && !changes['id'][0]
@ -918,7 +916,7 @@ log object update history with all updated attributes, if configured - will be e
id_from: value_id[0],
id_to: value_id[1],
}
#puts "HIST NEW #{self.class.to_s}.find(#{self.id}) #{data.inspect}"
#logger.info "HIST NEW #{self.class.to_s}.find(#{self.id}) #{data.inspect}"
self.history_log( 'updated', self.updated_by_id, data )
}
end
@ -1087,7 +1085,7 @@ check string/varchar size and cut them if needed
if column && limit
current_length = attribute[1].to_s.length
if limit < current_length
logger.info "WARNING: cut string because of database length #{self.class.to_s}.#{attribute[0]}(#{limit} but is #{current_length}:#{attribute[1].to_s})"
logger.warn "WARNING: cut string because of database length #{self.class.to_s}.#{attribute[0]}(#{limit} but is #{current_length}:#{attribute[1].to_s})"
self[ attribute[0] ] = attribute[1][ 0, limit ]
end
end

View file

@ -8,12 +8,12 @@ class Avatar < ApplicationModel
add an avatar based on auto detection (email address)
Avatar.auto_detection(
:object => 'User',
:o_id => user.id,
:url => 'somebody@example.com',
:source => 'web',
:updated_by_id => 1,
:created_by_id => 1,
:object => 'User',
:o_id => user.id,
:url => 'somebody@example.com',
:source => 'web',
:updated_by_id => 1,
:created_by_id => 1,
)
=end
@ -28,7 +28,7 @@ add an avatar based on auto detection (email address)
# dry gravatar lookup
hash = Digest::MD5.hexdigest(data[:url])
url = "http://www.gravatar.com/avatar/#{hash}.jpg?s=160&d=404"
puts "#{data[:url]}: #{url}"
logger.info "Avatar.auto_detection found #{data[:url]}: #{url}"
Avatar.add(
object: data[:object],
@ -46,10 +46,10 @@ add an avatar based on auto detection (email address)
add a avatar
Avatar.add(
:object => 'User',
:o_id => user.id,
:default => true,
:full => {
:object => 'User',
:o_id => user.id,
:default => true,
:full => {
:content => '...',
:mime_type => 'image/png',
},
@ -57,9 +57,9 @@ add a avatar
:content => '...',
:mime_type => 'image/png',
},
:source => 'web',
:updated_by_id => 1,
:created_by_id => 1,
:source => 'web',
:updated_by_id => 1,
:created_by_id => 1,
)
=end
@ -124,12 +124,10 @@ add a avatar
},
)
if !response.success?
#puts "WARNING: Can't fetch '#{self.image_source}' (maybe no avatar available), http code: #{response.code.to_s}"
#raise "Can't fetch '#{self.image_source}', http code: #{response.code.to_s}"
# @TODO remove comment and log instead
logger.info "Can't fetch '#{data[:url]}' (maybe no avatar available), http code: #{response.code}"
return
end
#puts "NOTICE: Fetch '#{self.image_source}', http code: #{response.code.to_s}"
logger.info "Fetch '#{data[:url]}', http code: #{response.code}"
mime_type = 'image'
if data[:url] =~ /\.png/i
mime_type = 'image/png'

View file

@ -10,9 +10,9 @@ class Channel < ApplicationModel
c = eval 'Channel::' + channel[:adapter].upcase + '.new'
c.fetch(channel)
rescue Exception => e
puts "can't use " + 'Channel::' + channel[:adapter].upcase
puts e.inspect
puts e.backtrace
logger.error "can't use " + 'Channel::' + channel[:adapter].upcase
logger.error e.inspect
logger.error e.backtrace
c.disconnect
end
}

View file

@ -337,8 +337,8 @@ class Channel::EmailParser
begin
backend.run( channel, mail )
rescue Exception => e
puts "can't run postmaster pre filter #{backend}"
puts e.inspect
Rails.logger.error "can't run postmaster pre filter #{backend}"
Rails.logger.error e.inspect
return false
end
}
@ -364,7 +364,6 @@ class Channel::EmailParser
user = User.where( email: mail[ 'x-zammad-customer-email'.to_sym ] || mail[:from_email] ).first
end
if !user
puts 'create user...'
user = user_create(
login: mail[ 'x-zammad-customer-login'.to_sym ] || mail[ 'x-zammad-customer-email'.to_sym ] || mail[:from_email],
firstname: mail[ 'x-zammad-customer-firstname'.to_sym ] || mail[:from_display_name],
@ -485,8 +484,8 @@ class Channel::EmailParser
begin
backend.run( channel, mail, ticket, article, user )
rescue Exception => e
puts "can't run postmaster post filter #{backend}"
puts e.inspect
Rails.logger.error "can't run postmaster post filter #{backend}"
Rails.logger.error e.inspect
end
}
@ -540,10 +539,10 @@ class Channel::EmailParser
key_short = key[ 0, key.length - 3 ]
header = "x-zammad-#{header_name}-#{key_short}"
if mail[ header.to_sym ]
puts "NOTICE: header #{header} found #{mail[ header.to_sym ]}"
Rails.logger.info "header #{header} found #{mail[ header.to_sym ]}"
item_object.class.reflect_on_all_associations.map { |assoc|
if assoc.name.to_s == key_short
puts "NOTICE: ASSOC found #{assoc.class_name} lookup #{mail[ header.to_sym ]}"
Rails.logger.info "ASSOC found #{assoc.class_name} lookup #{mail[ header.to_sym ]}"
item = assoc.class_name.constantize
if item.respond_to?(:name)
@ -563,7 +562,7 @@ class Channel::EmailParser
# check if attribute exists
header = "x-zammad-#{header_name}-#{key}"
if mail[ header.to_sym ]
puts "NOTICE: header #{header} found #{mail[ header.to_sym ]}"
Rails.logger.info "header #{header} found #{mail[ header.to_sym ]}"
item_object[key] = mail[ header.to_sym ]
end
}

View file

@ -9,9 +9,9 @@ module Channel::EmailSend
c = eval 'Channel::' + channel[:adapter] + '.new'
c.send(attr, channel, notification)
rescue Exception => e
puts "can't use " + 'Channel::' + channel[:adapter]
puts e.inspect
puts e.backtrace
Rails.logger.error "can't use " + 'Channel::' + channel[:adapter]
Rails.logger.error e.inspect
Rails.logger.error e.backtrace
end
end
end

View file

@ -8,7 +8,7 @@ module Channel::Filter::Database
# process postmaster filter
filters = PostmasterFilter.where( active: true, channel: 'email' )
filters.each {|filter|
puts " proccess filter #{filter.name} ..."
Rails.logger.info " proccess filter #{filter.name} ..."
match = true
loop = false
filter[:match].each {|key, value|
@ -19,21 +19,21 @@ module Channel::Filter::Database
scan = mail[ key.downcase.to_sym ].scan(/#{value}/i)
end
if match && scan[0]
puts " matching #{ key.downcase }:'#{ mail[ key.downcase.to_sym ] }' on #{value}"
Rails.logger.info " matching #{ key.downcase }:'#{ mail[ key.downcase.to_sym ] }' on #{value}"
match = true
else
puts " is not matching #{ key.downcase }:'#{ mail[ key.downcase.to_sym ] }' on #{value}"
Rails.logger.info " is not matching #{ key.downcase }:'#{ mail[ key.downcase.to_sym ] }' on #{value}"
match = false
end
rescue Exception => e
match = false
puts "can't use match rule #{value} on #{mail[ key.to_sym ]}"
puts e.inspect
Rails.logger.error "can't use match rule #{value} on #{mail[ key.to_sym ]}"
Rails.logger.error e.inspect
end
}
if loop && match
filter[:perform].each {|key, value|
puts " perform '#{ key.downcase }' = '#{value}'"
Rails.logger.info " perform '#{ key.downcase }' = '#{value}'"
mail[ key.downcase.to_sym ] = value
}
end

View file

@ -12,7 +12,7 @@ class Channel::IMAP < Channel::EmailParser
port = 143
end
puts "fetching imap (#{channel[:options][:host]}/#{channel[:options][:user]} port=#{port},ssl=#{ssl})"
logger.info "fetching imap (#{channel[:options][:host]}/#{channel[:options][:user]} port=#{port},ssl=#{ssl})"
# on check, reduce open_timeout to have faster probing
timeout = 12
@ -42,11 +42,11 @@ class Channel::IMAP < Channel::EmailParser
@imap.select( channel[:options][:folder] )
end
if check_type == 'check'
puts 'check only mode, fetch no emails'
logger.info 'check only mode, fetch no emails'
disconnect
return
elsif check_type == 'verify'
puts "verify mode, fetch no emails #{verify_string}"
logger.info "verify mode, fetch no emails #{verify_string}"
end
message_ids = @imap.search(['ALL'])
@ -60,14 +60,14 @@ class Channel::IMAP < Channel::EmailParser
message_ids.each do |message_id|
count += 1
puts " - message #{count.to_s}/#{count_all.to_s}"
# puts msg.to_s
logger.info " - message #{count.to_s}/#{count_all.to_s}"
#logger.info msg.to_s
# check for verify message
if check_type == 'verify'
subject = @imap.fetch(message_id, 'ENVELOPE')[0].attr['ENVELOPE'].subject
if subject && subject =~ /#{verify_string}/
puts " - verify email #{verify_string} found"
logger.info " - verify email #{verify_string} found"
@imap.store(message_id, '+FLAGS', [:Deleted])
@imap.expunge()
disconnect
@ -85,15 +85,13 @@ class Channel::IMAP < Channel::EmailParser
@imap.expunge()
disconnect
if count == 0
puts ' - no message'
logger.info ' - no message'
end
puts 'done'
logger.info 'done'
end
def disconnect
return if !@imap
@imap.disconnect()
end
end

View file

@ -2,7 +2,7 @@
class Channel::MailStdin < Channel::EmailParser
def initialize
puts 'read main from STDIN'
logger.info 'read main from STDIN'
msg = ARGF.read

View file

@ -12,7 +12,7 @@ class Channel::POP3 < Channel::EmailParser
port = 110
end
puts "fetching pop3 (#{channel[:options][:host]}/#{channel[:options][:user]} port=#{port},ssl=#{ssl})"
logger.info "fetching pop3 (#{channel[:options][:host]}/#{channel[:options][:user]} port=#{port},ssl=#{ssl})"
@pop = Net::POP3.new( channel[:options][:host], port )
@ -27,11 +27,11 @@ class Channel::POP3 < Channel::EmailParser
end
@pop.start( channel[:options][:user], channel[:options][:password] )
if check_type == 'check'
puts 'check only mode, fetch no emails'
logger.info 'check only mode, fetch no emails'
disconnect
return
elsif check_type == 'verify'
puts 'verify mode, fetch no emails'
logger.info 'verify mode, fetch no emails'
end
mails = @pop.mails
@ -45,13 +45,13 @@ class Channel::POP3 < Channel::EmailParser
mails.each do |m|
count += 1
puts " - message #{count.to_s}/#{count_all.to_s}"
logger.info " - message #{count.to_s}/#{count_all.to_s}"
# check for verify message
if check_type == 'verify'
mail = m.pop
if mail && mail =~ /#{verify_string}/
puts " - verify email #{verify_string} found"
logger.info " - verify email #{verify_string} found"
m.delete
disconnect
return 'verify ok'
@ -66,15 +66,13 @@ class Channel::POP3 < Channel::EmailParser
end
disconnect
if count == 0
puts ' - no message'
logger.info ' - no message'
end
puts 'done'
logger.info 'done'
end
def disconnect
return if !@pop
@pop.finish
end

View file

@ -21,13 +21,13 @@ class Channel::TWITTER2
def fetch (channel)
puts "fetching tweets (oauth_token#{channel[:options][:oauth_token]})"
logger.info "fetching tweets (oauth_token#{channel[:options][:oauth_token]})"
@client = connect(channel)
# search results
if channel[:options][:search]
channel[:options][:search].each { |search|
puts " - searching for #{search[:item]}"
logger.info " - searching for #{search[:item]}"
tweets = []
@client.search( search[:item], count: 50, result_type: 'recent' ).collect do |tweet|
tweets.push tweet
@ -39,7 +39,7 @@ class Channel::TWITTER2
# mentions
if channel[:options][:mentions]
puts ' - searching for mentions'
logger.info ' - searching for mentions'
tweets = @client.mentions_timeline
@article_type = 'twitter status'
fetch_loop( tweets, channel, channel[:options][:mentions][:group] )
@ -47,12 +47,12 @@ class Channel::TWITTER2
# direct messages
if channel[:options][:direct_messages]
puts ' - searching for direct_messages'
logger.info ' - searching for direct_messages'
tweets = @client.direct_messages
@article_type = 'twitter direct-message'
fetch_loop( tweets, channel, channel[:options][:direct_messages][:group] )
end
puts 'done'
logger.info 'done'
disconnect
end
@ -68,7 +68,7 @@ class Channel::TWITTER2
all_tweets.push tweet
end
else
puts 'UNKNOWN: ' + result_class.to_s
logger.error 'UNKNOWN: ' + result_class.to_s
end
# find tweets
@ -86,7 +86,7 @@ class Channel::TWITTER2
# reset current_user
UserInfo.current_user_id = 1
puts 'import tweet'
logger.info 'import tweet'
fetch_import( tweet, channel, group )
end
@ -113,7 +113,7 @@ class Channel::TWITTER2
begin
sender = @client.user(tweet.from_user_id)
rescue Exception => e
puts 'Exception: twitter: ' + e.inspect
logger.error 'Exception: twitter: ' + e.inspect
return
end
end
@ -121,16 +121,16 @@ class Channel::TWITTER2
# check if parent exists
user = nil, ticket = nil, article = nil
if tweet.respond_to?('in_reply_to_status_id') && tweet.in_reply_to_status_id && tweet.in_reply_to_status_id.to_s != ''
puts 'import in_reply_tweet ' + tweet.in_reply_to_status_id.to_s
logger.info 'import in_reply_tweet ' + tweet.in_reply_to_status_id.to_s
tweet_sub = @client.status( tweet.in_reply_to_status_id )
# puts tweet_sub.inspect
#logger.debug tweet_sub.inspect
(user, ticket, article) = fetch_import(tweet_sub, channel, group)
end
# create stuff
user = fetch_user_create(tweet, sender)
if !ticket
puts 'create new ticket...'
logger.info 'create new ticket...'
ticket = fetch_ticket_create(user, tweet, sender, channel, group)
end
article = fetch_article_create(user, ticket, tweet, sender)
@ -144,11 +144,11 @@ class Channel::TWITTER2
auth = Authorization.where( uid: sender.id, provider: 'twitter' ).first
user = nil
if auth
puts 'user_id', auth.user_id
logger.info 'user_id', auth.user_id
user = User.where( id: auth.user_id ).first
end
if !user
puts 'create user...'
logger.info 'create user...'
roles = Role.where( name: 'Customer' )
user = User.create(
login: sender.screen_name,
@ -163,7 +163,7 @@ class Channel::TWITTER2
updated_by_id: 1,
created_by_id: 1
)
puts 'autentication create...'
logger.info 'autentication create...'
authentication = Authorization.create(
uid: sender.id,
username: sender.screen_name,
@ -171,7 +171,7 @@ class Channel::TWITTER2
provider: 'twitter'
)
else
puts 'user exists'#, user.inspect
logger.info 'user exists'#, user.inspect
end
# set current user
@ -182,13 +182,13 @@ class Channel::TWITTER2
def fetch_ticket_create(user, tweet, sender, channel, group)
# puts '+++++++++++++++++++++++++++' + tweet.inspect
#logger.info '+++++++++++++++++++++++++++' + tweet.inspect
# check if ticket exists
if tweet.respond_to?('in_reply_to_status_id') && tweet.in_reply_to_status_id && tweet.in_reply_to_status_id.to_s != ''
puts 'tweet.in_reply_to_status_id found: ' + tweet.in_reply_to_status_id.to_s
logger.info 'tweet.in_reply_to_status_id found: ' + tweet.in_reply_to_status_id.to_s
article = Ticket::Article.where( message_id: tweet.in_reply_to_status_id.to_s ).first
if article
puts 'article with id found tweet.in_reply_to_status_id found: ' + tweet.in_reply_to_status_id.to_s
logger.info 'article with id found tweet.in_reply_to_status_id found: ' + tweet.in_reply_to_status_id.to_s
return article.ticket
end
end
@ -279,13 +279,13 @@ class Channel::TWITTER2
config.access_token_secret = channel[:options][:oauth_token_secret]
end
if attr[:type] == 'twitter direct-message'
puts 'to:' + attr[:to].to_s
logger.info 'to:' + attr[:to].to_s
dm = client.create_direct_message(
attr[:to].to_s,
attr[:body].to_s,
{}
)
# puts dm.inspect
# logger.info dm.inspect
return dm
end
@ -297,7 +297,7 @@ class Channel::TWITTER2
in_reply_to_status_id: attr[:in_reply_to]
}
)
# puts message.inspect
# logger.debug message.inspect
message
end
end

View file

@ -47,19 +47,19 @@ class Job < ApplicationModel
.limit( 1_000 )
job.processed = tickets.count
tickets.each do |ticket|
#puts "CHANGE #{job.execute.inspect}"
logger.debug "CHANGE #{job.execute.inspect}"
changed = false
job.execute.each do |key, value|
changed = true
attribute = key.split('.', 2).last
#puts "-- #{Ticket.columns_hash[ attribute ].type.to_s}"
logger.debug "-- #{Ticket.columns_hash[ attribute ].type}"
#value = 4
#if Ticket.columns_hash[ attribute ].type == :integer
# puts "to i #{attribute}/#{value.inspect}/#{value.to_i.inspect}"
# logger.debug "to i #{attribute}/#{value.inspect}/#{value.to_i.inspect}"
# #value = value.to_i
#end
ticket[attribute] = value
#puts "set #{attribute} = #{value.inspect}"
logger.debug "set #{attribute} = #{value.inspect}"
end
next if !changed
ticket.updated_by_id = 1

View file

@ -24,7 +24,6 @@ class Observer::Ticket::ArticleChanges < ActiveRecord::Observer
# set frist response
def first_response_update(record)
# puts 'check first response'
# return if we run import mode
return if Setting.get('import_mode')
@ -62,7 +61,6 @@ class Observer::Ticket::ArticleChanges < ActiveRecord::Observer
# set last contact
def last_contact_update(record)
# puts 'check last contact'
# if article in internal
return true if record.internal

View file

@ -14,7 +14,6 @@ class Observer::Ticket::CloseTime < ActiveRecord::Observer
private
def _check(record)
#puts 'check close time'
# return if we run import mode
return if Setting.get('import_mode')

View file

@ -111,8 +111,8 @@ class Observer::Ticket::Notification < ActiveRecord::Observer
# return if we run import mode
return if Setting.get('import_mode')
# puts 'CREATED!!!!'
# puts record.inspect
# logger.info 'CREATED!!!!'
# logger.info record.inspect
e = {
name: record.class.name,
type: 'create',
@ -163,10 +163,10 @@ class Observer::Ticket::Notification < ActiveRecord::Observer
# return if we run import mode
return if Setting.get('import_mode')
# puts 'after_update'
# puts record.inspect
# puts '-----'
# puts @a.inspect
# AuditTrail.new(record, "UPDATED")
# logger.info 'after_update'
# logger.info record.inspect
# logger.info '-----'
# logger.info @a.inspect
# AuditTrail.new(record, "UPDATED")
end
end

View file

@ -117,7 +117,7 @@ class Observer::Ticket::Notification::BackgroundJob
notification[:subject] = ticket.subject_build( notification[:subject] )
# send notification
puts "send ticket notifiaction to agent (#{@p[:type]}/#{ticket.id}/#{user.email})"
logger.info "send ticket notifiaction to agent (#{@p[:type]}/#{ticket.id}/#{user.email})"
NotificationFactory.send(
recipient: user,

View file

@ -4,7 +4,6 @@ class Observer::Ticket::ResetNewState < ActiveRecord::Observer
observe 'ticket::_article'
def after_create(record)
# puts 'check reset new state'
# return if we run import mode
return if Setting.get('import_mode')

View file

@ -317,9 +317,9 @@ class Package < ApplicationModel
begin
load entry
rescue => e
puts "TRIED TO RELOAD '#{entry}'"
puts 'ERROR: ' + e.inspect
puts 'Traceback: ' + e.backtrace.inspect
logger.error "TRIED TO RELOAD '#{entry}'"
logger.error 'ERROR: ' + e.inspect
logger.error 'Traceback: ' + e.backtrace.inspect
end
end
}
@ -331,7 +331,7 @@ class Package < ApplicationModel
begin
package = REXML::Document.new( xml )
rescue => e
puts 'ERROR: ' + e.inspect
logger.error 'ERROR: ' + e.inspect
return
end
logger.debug package.inspect

View file

@ -1,5 +1,5 @@
# Copyright (C) 2012-2014 Zammad Foundation, http://zammad-foundation.org/
# rubocop:disable Rails/Output
class Scheduler < ApplicationModel
def self.run( runner, runner_count )
@ -14,7 +14,7 @@ class Scheduler < ApplicationModel
begin
ActiveRecord::Base.connection.reconnect!
rescue => e
puts "Can't reconnect to database #{ e.inspect }"
logger.error "Can't reconnect to database #{ e.inspect }"
end
# read/load jobs and check if it is alredy started
@ -70,13 +70,13 @@ class Scheduler < ApplicationModel
logger.info "execute #{job.method} (runner #{runner} of #{runner_count}, try_count #{try_count})..."
eval job.method()
rescue => e
puts "execute #{job.method} (runner #{runner} of #{runner_count}, try_count #{try_count}) exited with error #{ e.inspect }"
logger.error "execute #{job.method} (runner #{runner} of #{runner_count}, try_count #{try_count}) exited with error #{ e.inspect }"
# reconnect in case db connection is lost
begin
ActiveRecord::Base.connection.reconnect!
rescue => e
puts "Can't reconnect to database #{ e.inspect }"
logger.error "Can't reconnect to database #{ e.inspect }"
end
try_run_max = 10
@ -129,7 +129,7 @@ class Scheduler < ApplicationModel
puts "CRITICAL - no such scheduler jobs '#{name}'"
return true
end
#puts "S " + scheduler.inspect
logger.debug scheduler.inspect
if !scheduler.last_run
puts "CRITICAL - scheduler jobs never started '#{name}'"
exit 2

View file

@ -45,10 +45,10 @@ class Store
Store::File.all.each {|item|
content = item.content
sha = Digest::SHA256.hexdigest( content )
puts "CHECK: Store::File.find(#{item.id}) "
logger.info "CHECK: Store::File.find(#{item.id}) "
if sha != item.sha
success = false
puts "DIFF: sha diff of Store::File.find(#{item.id}) "
logger.error "DIFF: sha diff of Store::File.find(#{item.id}) "
if fix_it
item.update_attribute( :sha, sha )
end
@ -77,7 +77,7 @@ class Store
# remove from old provider
adapter_source.delete( item.sha )
puts "NOTICE: Moved file #{item.sha} from #{source} to #{target}"
logger.info "NOTICE: Moved file #{item.sha} from #{source} to #{target}"
}
true
end

View file

@ -38,18 +38,18 @@ class Store::Provider::File
# unlink file from fs
def self.unlink_from_fs(sha)
if File.exist?( get_locaton(sha) )
puts "NOTICE: storge remove '#{ get_locaton(sha) }'"
Rails.logger.info "storge remove '#{ get_locaton(sha) }'"
File.delete( get_locaton(sha) )
end
end
# read file from fs
def self.read_from_fs(sha)
puts "read from fs #{ get_locaton(sha) }"
Rails.logger.info "read from fs #{ get_locaton(sha) }"
if !File.exist?( get_locaton(sha) )
raise "ERROR: No such file #{ get_locaton(sha) }"
end
data = File.open( get_locaton(sha), 'rb' )
data = File.open( get_locaton(sha), 'rb' )
content = data.read
# check sha
@ -66,7 +66,7 @@ class Store::Provider::File
# install file
permission = '600'
if !File.exist?( get_locaton(sha) )
puts "NOTICE: storge write '#{ get_locaton(sha) }' (#{permission})"
Rails.logger.info "storge write '#{ get_locaton(sha) }' (#{permission})"
file = File.new( get_locaton(sha), 'wb' )
file.write( data )
file.close

View file

@ -30,7 +30,6 @@ load translations from online
ActiveRecord::Base.transaction do
result.data.each {|translation|
#puts translation.inspect
# handle case insensitive sql
exists = Translation.where(locale: translation['locale'], format: translation['format'], source: translation['source'])

View file

@ -42,4 +42,6 @@ Zammad::Application.configure do
# define cache store
config.cache_store = :file_store, 'tmp/cache_file_store_development'
# format log
config.log_formatter = Logger::Formatter.new
end

View file

@ -76,7 +76,10 @@ Zammad::Application.configure do
config.dependency_loading = true
# Use default logging formatter so that PID and timestamp are not suppressed
config.log_formatter = ::Logger::Formatter.new
#config.log_formatter = ::Logger::Formatter.new
# format log
config.log_formatter = Logger::Formatter.new
# define cache store
config.cache_store = :file_store, 'tmp/cache_file_store_production'

View file

@ -49,4 +49,6 @@ Zammad::Application.configure do
# define cache store
config.cache_store = :file_store, 'tmp/cache_file_store_test'
# format log
config.log_formatter = Logger::Formatter.new
end

View file

@ -1,44 +0,0 @@
Zammad::Application.configure do
# Settings specified here will take precedence over those in config/application.rb
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
config.cache_classes = true
# Configure static asset server for tests with Cache-Control for performance
config.serve_static_assets = true
config.static_cache_control = 'public, max-age=3600'
# Disable assert compression for relyable error code lines
config.assets.compress = false
# Don't fallback to assets pipeline if a precompiled asset is missed
config.assets.compile = true
# Generate digests for assets URLs
config.assets.digest = true
# Show full error reports and disable caching
config.consider_all_requests_local = true
config.action_controller.perform_caching = true
# Raise exceptions instead of rendering exception templates
config.action_dispatch.show_exceptions = false
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
# Print deprecation notices to the stderr
config.active_support.deprecation = :stderr
# Disable request forgery protection in test environment
config.action_controller.allow_forgery_protection = false
# Enable autoload
config.dependency_loading = true
end

View file

@ -9,7 +9,6 @@ delete a cache
=end
def self.delete( key )
# puts 'Cache.delete' + key.to_s
Rails.cache.delete( key.to_s )
end
@ -31,11 +30,10 @@ write a cache
if !params[:expires_in]
params[:expires_in] = 24.hours
end
# puts 'Cache.write: ' + key.to_s
begin
Rails.cache.write( key.to_s, data, params)
rescue Exception => e
puts "NOTICE: #{e.message}"
Rails.logger.error "NOTICE: #{e.message}"
end
end
@ -48,7 +46,6 @@ get a cache
=end
def self.get( key )
# puts 'Cache.get: ' + key.to_s
Rails.cache.read( key.to_s )
end
@ -61,7 +58,6 @@ clear whole cache store
=end
def self.clear
# puts 'Cache.clear...'
# workaround, set test cache before clear whole cache, Rails.cache.clear complains about not existing cache dir
Cache.write( 'test', 1 )

View file

@ -22,19 +22,17 @@ module Encode
string.encode!( 'UTF-8', 'Windows-1252' )
rescue EncodingError => e
puts "Bad encoding: #{string.inspect}"
Rails.logger.error "Bad encoding: #{string.inspect}"
string = string.encode!( 'UTF-8', invalid: :replace, undef: :replace, replace: '?' )
end
return string
end
# puts '-------' + charset
# puts string
# convert string
begin
string.encode!( 'UTF-8', charset )
rescue => e
puts 'ERROR: ' + e.inspect
Rails.logger.error 'ERROR: ' + e.inspect
string
end
# Iconv.conv( 'UTF8', charset, string )

View file

@ -1,3 +1,4 @@
# rubocop:disable Rails/Output
module FillDB
def self.load( agents, customers, groups, organizations, tickets )
puts 'load db with:'

View file

@ -6,10 +6,10 @@ module Rss
return items if items
begin
puts 'fetch rss...'
Rails.logger.info "fetch rss... #{url}"
response = UserAgent.request(url)
if !response.success?
raise "Can't fetch '#{url}', http code: #{response.code.to_s}"
raise "Can't fetch '#{url}', http code: #{response.code}"
return
end
rss = SimpleRSS.parse response.body
@ -29,8 +29,8 @@ module Rss
}
Cache.write( cache_key, items, expires_in: 4.hours )
rescue Exception => e
puts "can't fetch #{url}"
puts e.inspect
Rails.logger.error "can't fetch #{url}"
Rails.logger.error e.inspect
return
end

View file

@ -43,8 +43,8 @@ create/update/delete index
return SearchIndexBackend.remove( data[:name] )
end
puts "# curl -X PUT \"#{url}\" \\"
#puts "-d '#{data[:data].to_json}'"
Rails.logger.info "# curl -X PUT \"#{url}\" \\"
#Rails.logger.info "-d '#{data[:data].to_json}'"
response = UserAgent.put(
url,
@ -75,8 +75,8 @@ add new object to search index
url = build_url( type, data['id'] )
return if !url
puts "# curl -X POST \"#{url}\" \\"
#puts "-d '#{data.to_json}'"
Rails.logger.info "# curl -X POST \"#{url}\" \\"
#Rails.logger.info "-d '#{data.to_json}'"
response = UserAgent.post(
url,
@ -89,7 +89,7 @@ add new object to search index
password: Setting.get('es_password'),
}
)
puts "# #{response.code.to_s}"
        Rails.logger.info "# #{response.code}"
return true if response.success?
raise response.inspect
end
@ -119,9 +119,9 @@ remove whole data from index
password: Setting.get('es_password'),
}
)
#puts "# #{response.code.to_s}"
#Rails.logger.info "# #{response.code.to_s}"
return true if response.success?
#puts "NOTICE: can't drop index: " + response.inspect
#Rails.logger.info "NOTICE: can't drop index: " + response.inspect
false
end
@ -193,8 +193,8 @@ return search result
}
data['query']['bool']['must'].push condition
puts "# curl -X POST \"#{url}\" \\"
#puts " -d'#{data.to_json}'"
Rails.logger.info "# curl -X POST \"#{url}\" \\"
#Rails.logger.info " -d'#{data.to_json}'"
response = UserAgent.get(
url,
@ -208,7 +208,7 @@ return search result
}
)
puts "# #{response.code.to_s}"
        Rails.logger.info "# #{response.code}"
if !response.success?
puts "ERROR: #{response.inspect}"
return []

View file

@ -247,12 +247,12 @@ returns
data = nil
if !File.exist? session_dir
self.destory(client_id)
puts "ERROR: missing session directory for '#{client_id}', remove session."
Rails.logger.error "missing session directory for '#{client_id}', remove session."
return
end
if !File.exist? session_file
self.destory(client_id)
puts "ERROR: missing session file for '#{client_id}', remove session."
            Rails.logger.error "missing session file for '#{client_id}', remove session."
return
end
begin
@ -267,9 +267,9 @@ returns
end
}
rescue Exception => e
puts e.inspect
Rails.logger.error e.inspect
self.destory(client_id)
puts "ERROR: reading session file '#{session_file}', remove session."
        Rails.logger.error "reading session file '#{session_file}', remove session."
return
end
data
@ -452,7 +452,7 @@ returns
begin
message_parsed = JSON.parse( spool['msg'] )
rescue => e
log 'error', "can't parse spool message: #{ message }, #{ e.inspect }"
Rails.logger.error "can't parse spool message: #{ message }, #{ e.inspect }"
next
end
@ -567,19 +567,19 @@ returns
=end
def self.thread_client(client_id, try_count = 0, try_run_time = Time.now)
puts "LOOP #{client_id} - #{try_count}"
Rails.logger.info "LOOP #{client_id} - #{try_count}"
begin
Sessions::Client.new(client_id)
rescue => e
puts "thread_client #{client_id} exited with error #{ e.inspect }"
puts e.backtrace.join("\n ")
Rails.logger.error "thread_client #{client_id} exited with error #{ e.inspect }"
Rails.logger.error e.backtrace.join("\n ")
sleep 10
begin
# ActiveRecord::Base.remove_connection
# ActiveRecord::Base.connection_pool.reap
ActiveRecord::Base.connection_pool.release_connection
rescue => e
puts "Can't reconnect to database #{ e.inspect }"
Rails.logger.error "Can't reconnect to database #{ e.inspect }"
end
try_run_max = 10
@ -598,7 +598,7 @@ returns
raise "STOP thread_client for client #{client_id} after #{try_run_max} tries"
end
end
puts "/LOOP #{client_id} - #{try_count}"
Rails.logger.info "/LOOP #{client_id} - #{try_count}"
end
def self.symbolize_keys(hash)

View file

@ -20,7 +20,7 @@ module Sso::Otrs
user = User.where( login: result['user']['UserLogin'], active: true ).first
if !user
Rails.logger.notice "No such user #{result['user']['UserLogin']}, requested for SSO!"
Rails.logger.info "No such user #{result['user']['UserLogin']}, requested for SSO!"
return
end