Corrected with rubocop cop 'Style/RedundantSelf'.

Author: Thorsten Eckel, 2015-05-07 14:10:38 +02:00
parent 12c0ae1150
commit 67c4215554
61 changed files with 348 additions and 350 deletions
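
RuboCop's Style/RedundantSelf cop flags explicit self. receivers that Ruby does not require, such as plain attribute reads and calls to other instance methods. The receiver still has to stay on attribute writes (a bare assignment would only create a local variable) and wherever a local variable shadows the method name, which is why assignments such as self.updated_by_id = UserInfo.current_user_id are left untouched in the hunks below. A minimal, hypothetical sketch of the pattern the cop corrects; the Ticket class and its attributes here are illustrative, not taken from the Zammad code base:

# Hypothetical example of the pattern Style/RedundantSelf auto-corrects.
class Ticket
  attr_accessor :title, :owner_id

  # Before the cop runs: explicit receiver on attribute reads is redundant.
  def summary_before
    "#{self.title} (owner #{self.owner_id})"
  end

  # After correction: same behavior, no explicit receiver.
  def summary_after
    "#{title} (owner #{owner_id})"
  end

  def reassign(user_id)
    # self. must stay on attribute writes; "owner_id = user_id" alone
    # would only assign a new local variable.
    self.owner_id = user_id
  end
end

Since the cop is enabled by default in RuboCop, deleting the Enabled: false entry from .rubocop.yml (first hunk below) switches it back on; the remaining hunks are the resulting correction applied across the code base.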

View file

@ -187,8 +187,6 @@ Style/Documentation:
Description: 'Document classes and non-namespace modules.'
Enabled: false
Style/RedundantSelf:
Enabled: false
Style/CommentIndentation:
Enabled: false
Style/GlobalVars:

View file

@ -80,13 +80,13 @@ returns
# only use object attributes
data = {}
self.new.attributes.each {|item|
new.attributes.each {|item|
next if !params.key?(item[0])
data[item[0].to_sym] = params[item[0]]
}
# we do want to set this via database
self.param_validation(data)
param_validation(data)
end
=begin
@ -118,7 +118,7 @@ returns
list_of_items.each {|item|
list.push( assoc.klass.find(item) )
}
self.send( assoc.name.to_s + '=', list )
send( assoc.name.to_s + '=', list )
}
end
@ -142,7 +142,7 @@ returns
self.class.reflect_on_all_associations.map { |assoc|
real_key = assoc.name.to_s[0, assoc.name.to_s.length - 1] + '_ids'
if self.respond_to?( real_key )
attributes[ real_key ] = self.send( real_key )
attributes[ real_key ] = send( real_key )
end
}
attributes
@ -190,8 +190,8 @@ returns
def fill_up_user_create
if self.class.column_names.include? 'updated_by_id'
if UserInfo.current_user_id
if self.updated_by_id && self.updated_by_id != UserInfo.current_user_id
logger.info "NOTICE create - self.updated_by_id is different: #{self.updated_by_id}/#{UserInfo.current_user_id}"
if updated_by_id && updated_by_id != UserInfo.current_user_id
logger.info "NOTICE create - self.updated_by_id is different: #{updated_by_id}/#{UserInfo.current_user_id}"
end
self.updated_by_id = UserInfo.current_user_id
end
@ -201,8 +201,8 @@ returns
return if !UserInfo.current_user_id
if self.created_by_id && self.created_by_id != UserInfo.current_user_id
logger.info "NOTICE create - self.created_by_id is different: #{self.created_by_id}/#{UserInfo.current_user_id}"
if created_by_id && created_by_id != UserInfo.current_user_id
logger.info "NOTICE create - self.created_by_id is different: #{created_by_id}/#{UserInfo.current_user_id}"
end
self.created_by_id = UserInfo.current_user_id
end
@ -229,29 +229,29 @@ returns
end
def cache_update(o)
self.cache_delete if self.respond_to?('cache_delete')
cache_delete if self.respond_to?('cache_delete')
o.cache_delete if o.respond_to?('cache_delete')
end
def cache_delete
# delete id caches
key = self.class.to_s + '::' + self.id.to_s
key = self.class.to_s + '::' + id.to_s
Cache.delete( key.to_s )
key = self.class.to_s + ':f:' + self.id.to_s
key = self.class.to_s + ':f:' + id.to_s
Cache.delete( key.to_s )
# delete old name / login caches
if self.changed?
if self.changes.key?('name')
name = self.changes['name'][0].to_s
if changes.key?('name')
name = changes['name'][0].to_s
key = self.class.to_s + '::' + name
Cache.delete( key.to_s )
key = self.class.to_s + ':f:' + name
Cache.delete( key.to_s )
end
if self.changes.key?('login')
name = self.changes['login'][0].to_s
if changes.key?('login')
name = changes['login'][0].to_s
key = self.class.to_s + '::' + name
Cache.delete( key.to_s )
key = self.class.to_s + ':f:' + name
@ -269,26 +269,26 @@ returns
return if !self[:login]
key = self.class.to_s + '::' + self.login.to_s
key = self.class.to_s + '::' + login.to_s
Cache.delete( key.to_s )
key = self.class.to_s + ':f:' + self.login.to_s
key = self.class.to_s + ':f:' + login.to_s
Cache.delete( key.to_s )
end
def self.cache_set(data_id, data, full = false)
if !full
key = self.to_s + '::' + data_id.to_s
key = to_s + '::' + data_id.to_s
else
key = self.to_s + ':f:' + data_id.to_s
key = to_s + ':f:' + data_id.to_s
end
Cache.write( key.to_s, data )
end
def self.cache_get(data_id, full = false)
if !full
key = self.to_s + '::' + data_id.to_s
key = to_s + '::' + data_id.to_s
else
key = self.to_s + ':f:' + data_id.to_s
key = to_s + ':f:' + data_id.to_s
end
Cache.get( key.to_s )
end
@ -309,32 +309,32 @@ returns
def self.lookup(data)
if data[:id]
cache = self.cache_get( data[:id] )
cache = cache_get( data[:id] )
return cache if cache
record = self.find_by( id: data[:id] )
self.cache_set( data[:id], record )
record = find_by( id: data[:id] )
cache_set( data[:id], record )
return record
elsif data[:name]
cache = self.cache_get( data[:name] )
cache = cache_get( data[:name] )
return cache if cache
records = self.where( name: data[:name] )
records = where( name: data[:name] )
records.each {|record|
if record.name == data[:name]
self.cache_set( data[:name], record )
cache_set( data[:name], record )
return record
end
}
return
elsif data[:login]
cache = self.cache_get( data[:login] )
cache = cache_get( data[:login] )
return cache if cache
records = self.where( login: data[:login] )
records = where( login: data[:login] )
records.each {|record|
if record.login == data[:login]
self.cache_set( data[:login], record )
cache_set( data[:login], record )
return record
end
}
@ -358,25 +358,25 @@ returns
def self.create_if_not_exists(data)
if data[:id]
record = self.find_by( id: data[:id] )
record = find_by( id: data[:id] )
return record if record
elsif data[:name]
records = self.where( name: data[:name] )
records = where( name: data[:name] )
records.each {|record|
return record if record.name == data[:name]
}
elsif data[:login]
records = self.where( login: data[:login] )
records = where( login: data[:login] )
records.each {|record|
return record if record.login == data[:login]
}
elsif data[:locale] && data[:source]
records = self.where( locale: data[:locale], source: data[:source] )
records = where( locale: data[:locale], source: data[:source] )
records.each {|record|
return record if record.source == data[:source]
}
end
self.create(data)
create(data)
end
=begin
@ -393,45 +393,45 @@ returns
def self.create_or_update(data)
if data[:id]
records = self.where( id: data[:id] )
records = where( id: data[:id] )
records.each {|record|
record.update_attributes( data )
return record
}
record = self.new( data )
record = new( data )
record.save
return record
elsif data[:name]
records = self.where( name: data[:name] )
records = where( name: data[:name] )
records.each {|record|
if record.name == data[:name]
record.update_attributes( data )
return record
end
}
record = self.new( data )
record = new( data )
record.save
return record
elsif data[:login]
records = self.where( login: data[:login] )
records = where( login: data[:login] )
records.each {|record|
if record.login.downcase == data[:login].downcase
record.update_attributes( data )
return record
end
}
record = self.new( data )
record = new( data )
record.save
return record
elsif data[:locale]
records = self.where( locale: data[:locale] )
records = where( locale: data[:locale] )
records.each {|record|
if record.locale.downcase == data[:locale].downcase
record.update_attributes( data )
return record
end
}
record = self.new( data )
record = new( data )
record.save
return record
else
@ -457,7 +457,7 @@ end
end
def latest_change_set_from_observer
self.class.latest_change_set(self.updated_at)
self.class.latest_change_set(updated_at)
end
def latest_change_set_from_observer_destroy
@ -465,7 +465,7 @@ end
end
def self.latest_change_set(updated_at)
key = "#{self.new.class.name}_latest_change"
key = "#{new.class.name}_latest_change"
expires_in = 31_536_000 # 1 year
if updated_at.nil?
@ -488,17 +488,17 @@ returns
=end
def self.latest_change
key = "#{self.new.class.name}_latest_change"
key = "#{new.class.name}_latest_change"
updated_at = Cache.get( key )
logger.debug "request latest_change for #{key}/#{updated_at}"
# if we do not have it cached, do lookup
if !updated_at
o = self.select(:updated_at).order(updated_at: :desc).limit(1).first
o = select(:updated_at).order(updated_at: :desc).limit(1).first
if o
updated_at = o.updated_at
self.latest_change_set(updated_at)
latest_change_set(updated_at)
end
logger.debug "lookup latest_change for #{key}/#{updated_at}"
end
@ -544,12 +544,12 @@ class OwnModel < ApplicationModel
# return if we run import mode
return if Setting.get('import_mode')
logger.debug "#{ self.class.name }.find(#{ self.id }) notify created " + self.created_at.to_s
logger.debug "#{ self.class.name }.find(#{ id }) notify created " + created_at.to_s
class_name = self.class.name
class_name.gsub!(/::/, '')
Sessions.broadcast(
event: class_name + ':create',
data: { id: self.id, updated_at: self.updated_at }
data: { id: id, updated_at: updated_at }
)
end
@ -573,12 +573,12 @@ class OwnModel < ApplicationModel
# return if we run import mode
return if Setting.get('import_mode')
logger.debug "#{ self.class.name }.find(#{ self.id }) notify UPDATED " + self.updated_at.to_s
logger.debug "#{ self.class.name }.find(#{ id }) notify UPDATED " + updated_at.to_s
class_name = self.class.name
class_name.gsub!(/::/, '')
Sessions.broadcast(
event: class_name + ':update',
data: { id: self.id, updated_at: self.updated_at }
data: { id: id, updated_at: updated_at }
)
end
@ -602,12 +602,12 @@ class OwnModel < ApplicationModel
# return if we run import mode
return if Setting.get('import_mode')
logger.debug "#{ self.class.name }.find(#{ self.id }) notify TOUCH " + self.updated_at.to_s
logger.debug "#{ self.class.name }.find(#{ id }) notify TOUCH " + updated_at.to_s
class_name = self.class.name
class_name.gsub!(/::/, '')
Sessions.broadcast(
event: class_name + ':touch',
data: { id: self.id, updated_at: self.updated_at }
data: { id: id, updated_at: updated_at }
)
end
@ -630,12 +630,12 @@ class OwnModel < ApplicationModel
# return if we run import mode
return if Setting.get('import_mode')
logger.debug "#{ self.class.name }.find(#{ self.id }) notify DESTOY " + self.updated_at.to_s
logger.debug "#{ self.class.name }.find(#{ id }) notify DESTOY " + updated_at.to_s
class_name = self.class.name
class_name.gsub!(/::/, '')
Sessions.broadcast(
event: class_name + ':destroy',
data: { id: self.id, updated_at: self.updated_at }
data: { id: id, updated_at: updated_at }
)
end
@ -672,7 +672,7 @@ update search index, if configured - will be executed automatically
# start background job to transfer data to search index
return if !SearchIndexBackend.enabled?
Delayed::Job.enqueue( ApplicationModel::BackgroundJobSearchIndex.new( self.class.to_s, self.id ) )
Delayed::Job.enqueue( ApplicationModel::BackgroundJobSearchIndex.new( self.class.to_s, id ) )
end
=begin
@ -686,7 +686,7 @@ delete search index object, will be executed automatically
def search_index_destroy
return if !self.class.search_index_support_config
SearchIndexBackend.remove( self.class.to_s, self.id )
SearchIndexBackend.remove( self.class.to_s, id )
end
=begin
@ -699,9 +699,9 @@ reload search index with full data
def self.search_index_reload
return if !@search_index_support_config
all_ids = self.select('id').all.order('created_at DESC')
all_ids = select('id').all.order('created_at DESC')
all_ids.each { |item_with_id|
item = self.find( item_with_id.id )
item = find( item_with_id.id )
item.search_index_update_backend
}
end
@ -762,7 +762,7 @@ log object update activity stream, if configured - will be executed automaticall
end
log = false
self.changes.each {|key, _value|
changes.each {|key, _value|
# do not log created_at and updated_at attributes
next if ignore_attributes[key.to_sym] == true
@ -786,7 +786,7 @@ delete object activity stream, will be executed automatically
def activity_stream_destroy
return if !self.class.activity_stream_support_config
ActivityStream.remove( self.class.to_s, self.id )
ActivityStream.remove( self.class.to_s, id )
end
=begin
@ -819,7 +819,7 @@ log object create history, if configured - will be executed automatically
def history_create
return if !self.class.history_support_config
#logger.debug 'create ' + self.changes.inspect
self.history_log( 'created', self.created_by_id )
history_log( 'created', created_by_id )
end
@ -842,8 +842,8 @@ log object update history with all updated attributes, if configured - will be e
# new record also triggers update, so ignore new records
changes = self.changes
if self.history_changes_last_done
self.history_changes_last_done.each {|key, value|
if history_changes_last_done
history_changes_last_done.each {|key, value|
if changes.key?(key) && changes[key] == value
changes.delete(key)
end
@ -884,8 +884,8 @@ log object update history with all updated attributes, if configured - will be e
value_id[0] = value[0]
value_id[1] = value[1]
if self.respond_to?( attribute_name ) && self.send(attribute_name)
relation_class = self.send(attribute_name).class
if self.respond_to?( attribute_name ) && send(attribute_name)
relation_class = send(attribute_name).class
if relation_class && value_id[0]
relation_model = relation_class.lookup( id: value_id[0] )
if relation_model
@ -916,7 +916,7 @@ log object update history with all updated attributes, if configured - will be e
id_to: value_id[1],
}
#logger.info "HIST NEW #{self.class.to_s}.find(#{self.id}) #{data.inspect}"
self.history_log( 'updated', self.updated_by_id, data )
history_log( 'updated', updated_by_id, data )
}
end
@ -931,7 +931,7 @@ delete object history, will be executed automatically
def history_destroy
return if !self.class.history_support_config
History.remove( self.class.to_s, self.id )
History.remove( self.class.to_s, id )
end
=begin
@ -948,7 +948,7 @@ returns
=end
def attachments
Store.list( object: self.class.to_s, o_id: self.id )
Store.list( object: self.class.to_s, o_id: id )
end
=begin
@ -964,7 +964,7 @@ store attachments for this object
self.attachments_buffer = attachments
# update if object already exists
return if !( self.id && self.id != 0 )
return if !( id && id != 0 )
attachments_buffer_check
end
@ -982,7 +982,7 @@ return object and assets
=end
def self.full(id)
object = self.find(id)
object = find(id)
assets = object.assets({})
{
id: id,
@ -1046,11 +1046,11 @@ get assets of object list
attachments_buffer.each do |attachment|
article_store.push Store.add(
object: self.class.to_s,
o_id: self.id,
o_id: id,
data: attachment.content,
filename: attachment.filename,
preferences: attachment.preferences,
created_by_id: self.created_by_id,
created_by_id: created_by_id,
)
end
attachments_buffer = nil
@ -1066,7 +1066,7 @@ delete object recent viewed list, will be executed automatically
=end
def recent_view_destroy
RecentView.log_destroy( self.class.to_s, self.id )
RecentView.log_destroy( self.class.to_s, id )
end
=begin
@ -1076,7 +1076,7 @@ check string/varchar size and cut them if needed
=end
def check_limits
self.attributes.each {|attribute|
attributes.each {|attribute|
next if !self[ attribute[0] ]
next if self[ attribute[0] ].class != String
next if self[ attribute[0] ].empty?

View file

@ -25,8 +25,8 @@ returns
if !data[ self.class.to_app_model ]
data[ self.class.to_app_model ] = {}
end
if !data[ self.class.to_app_model ][ self.id ]
data[ self.class.to_app_model ][ self.id ] = self.attributes_with_associations
if !data[ self.class.to_app_model ][ id ]
data[ self.class.to_app_model ][ id ] = attributes_with_associations
end
return data if !self['created_by_id'] && !self['updated_by_id']

View file

@ -32,7 +32,7 @@ returns
# for performance reasons, Model.search_index_reload will only collect if of object
# get whole data here
data = self.class.find(self.id)
data = self.class.find(id)
# remove ignored attributes
attributes = data.attributes
@ -47,10 +47,10 @@ returns
# update backend
if self.class.column_names.include? 'active'
if self.active
if active
SearchIndexBackend.add( self.class.to_s, attributes )
else
SearchIndexBackend.remove( self.class.to_s, self.id )
SearchIndexBackend.remove( self.class.to_s, id )
end
else
SearchIndexBackend.add( self.class.to_s, attributes )

View file

@ -89,7 +89,7 @@ class Authorization < ApplicationModel
private
def delete_user_cache
self.user.cache_delete
user.cache_delete
end
end

View file

@ -180,10 +180,10 @@ class Channel::EmailParser
# protect process to work fine with spam emails, see test/fixtures/mail15.box
begin
attachs = self._get_attachment( part, data[:attachments], mail )
attachs = _get_attachment( part, data[:attachments], mail )
data[:attachments].concat( attachs )
rescue
attachs = self._get_attachment( part, data[:attachments], mail )
attachs = _get_attachment( part, data[:attachments], mail )
data[:attachments].concat( attachs )
end
}
@ -252,7 +252,7 @@ class Channel::EmailParser
if !file.parts.empty?
a = []
file.parts.each {|p|
attachment = self._get_attachment( p, attachments, mail )
attachment = _get_attachment( p, attachments, mail )
a.concat( attachment )
}
return a

View file

@ -23,7 +23,7 @@ class Channel::Facebook
'id',
'comments',
{
message: self.body
message: body
}
)
# client.direct_message_create(

View file

@ -42,19 +42,19 @@ add a new history entry for an object
# lookups
if data[:history_type]
history_type = self.type_lookup( data[:history_type] )
history_type = type_lookup( data[:history_type] )
end
if data[:history_object]
history_object = self.object_lookup( data[:history_object] )
history_object = object_lookup( data[:history_object] )
end
related_history_object_id = nil
if data[:related_history_object]
related_history_object = self.object_lookup( data[:related_history_object] )
related_history_object = object_lookup( data[:related_history_object] )
related_history_object_id = related_history_object.id
end
history_attribute_id = nil
if data[:history_attribute]
history_attribute = self.attribute_lookup( data[:history_attribute] )
history_attribute = attribute_lookup( data[:history_attribute] )
history_attribute_id = history_attribute.id
end
@ -149,13 +149,13 @@ returns
def self.list( requested_object, requested_object_id, related_history_object = nil, assets = nil )
if !related_history_object
history_object = self.object_lookup( requested_object )
history_object = object_lookup( requested_object )
history = History.where( history_object_id: history_object.id )
.where( o_id: requested_object_id )
.order('created_at ASC, id ASC')
else
history_object_requested = self.object_lookup( requested_object )
history_object_related = self.object_lookup( related_history_object )
history_object_requested = object_lookup( requested_object )
history_object_related = object_lookup( related_history_object )
history = History.where(
'((history_object_id = ? AND o_id = ?) OR (history_object_id = ? AND related_o_id = ? ))',
history_object_requested.id,
@ -174,13 +174,13 @@ returns
end
data = item.attributes
data['object'] = self.object_lookup_id( data['history_object_id'] ).name
data['type'] = self.type_lookup_id( data['history_type_id'] ).name
data['object'] = object_lookup_id( data['history_object_id'] ).name
data['type'] = type_lookup_id( data['history_type_id'] ).name
data.delete('history_object_id')
data.delete('history_type_id')
if data['history_attribute_id']
data['attribute'] = self.attribute_lookup_id( data['history_attribute_id'] ).name
data['attribute'] = attribute_lookup_id( data['history_attribute_id'] ).name
end
data.delete('history_attribute_id')
@ -194,7 +194,7 @@ returns
data.delete( 'value_from' )
end
if !data['related_history_object_id'].nil?
data['related_object'] = self.object_lookup_id( data['related_history_object_id'] ).name
data['related_object'] = object_lookup_id( data['related_history_object_id'] ).name
end
data.delete( 'related_history_object_id' )

View file

@ -75,7 +75,7 @@ class Job < ApplicationModel
private
def updated_matching
count = Ticket.where( self.condition.permit! ).count
count = Ticket.where( condition.permit! ).count
self.matching = count
end

View file

@ -20,7 +20,7 @@ class Link < ApplicationModel
=end
def self.list(data)
linkobject = self.link_object_get( name: data[:link_object] )
linkobject = link_object_get( name: data[:link_object] )
return if !linkobject
items = []
@ -75,19 +75,19 @@ class Link < ApplicationModel
def self.add(data)
if data.key?(:link_type)
linktype = self.link_type_get( name: data[:link_type] )
linktype = link_type_get( name: data[:link_type] )
data[:link_type_id] = linktype.id
data.delete( :link_type )
end
if data.key?(:link_object_source)
linkobject = self.link_object_get( name: data[:link_object_source] )
linkobject = link_object_get( name: data[:link_object_source] )
data[:link_object_source_id] = linkobject.id
data.delete( :link_object_source )
end
if data.key?(:link_object_target)
linkobject = self.link_object_get( name: data[:link_object_target] )
linkobject = link_object_get( name: data[:link_object_target] )
data[:link_object_target_id] = linkobject.id
data.delete( :link_object_target )
end
@ -109,18 +109,18 @@ class Link < ApplicationModel
def self.remove(data)
if data.key?(:link_object_source)
linkobject = self.link_object_get( name: data[:link_object_source] )
linkobject = link_object_get( name: data[:link_object_source] )
data[:link_object_source_id] = linkobject.id
end
if data.key?(:link_object_target)
linkobject = self.link_object_get( name: data[:link_object_target] )
linkobject = link_object_get( name: data[:link_object_target] )
data[:link_object_target_id] = linkobject.id
end
# from one site
if data.key?(:link_type)
linktype = self.link_type_get( name: data[:link_type] )
linktype = link_type_get( name: data[:link_type] )
data[:link_type_id] = linktype.id
end
links = Link.where(
@ -134,7 +134,7 @@ class Link < ApplicationModel
# from the other site
if data.key?(:link_type)
linktype = self.link_type_get( name: @map[ data[:link_type] ] )
linktype = link_type_get( name: @map[ data[:link_type] ] )
data[:link_type_id] = linktype.id
end
links = Link.where(

View file

@ -28,7 +28,7 @@ class ObjectLookup < ApplicationModel
end
# create
lookup = self.create(
lookup = create(
name: name
)
@@cache_object[ name ] = lookup.id

View file

@ -223,7 +223,7 @@ returns:
=end
def self.by_object_as_hash(object, user)
list = self.by_object(object, user)
list = by_object(object, user)
hash = {}
list.each {|item|
hash[ item[:name] ] = item

View file

@ -85,7 +85,7 @@ class Observer::Ticket::Notification::BackgroundJob
recipient_list += user.email.to_s
# ignore if no changes has been done
changes = self.human_changes(user, ticket)
changes = human_changes(user, ticket)
if @p[:type] == 'update' && !article && ( !changes || changes.empty? )
next
end
@ -93,9 +93,9 @@ class Observer::Ticket::Notification::BackgroundJob
# get user based notification template
# if create, send create message / block update messages
if @p[:type] == 'create'
template = self.template_create(user, ticket, article, changes)
template = template_create(user, ticket, article, changes)
elsif @p[:type] == 'update'
template = self.template_update(user, ticket, article, changes)
template = template_update(user, ticket, article, changes)
else
fail "unknown type for notification #{@p[:type]}"
end

View file

@ -37,7 +37,7 @@ class Observer::User::Geo < ActiveRecord::Observer
return if ( current_location == next_location ) && record.preferences['lat'] && record.preferences['lng']
# geo update
self.geo_update(record)
geo_update(record)
end
# update geo data of user

View file

@ -180,7 +180,7 @@ returns:
def notify_clients_after_change
Sessions.send_to(
self.user_id,
user_id,
{
event: 'OnlineNotification::changed',
data: {}

View file

@ -29,10 +29,10 @@ returns
if !data[ User.to_app_model ]
data[ User.to_app_model ] = {}
end
if !data[ Organization.to_app_model ][ self.id ]
data[ Organization.to_app_model ][ self.id ] = self.attributes_with_associations
if data[ Organization.to_app_model ][ self.id ]['member_ids']
data[ Organization.to_app_model ][ self.id ]['member_ids'].each {|user_id|
if !data[ Organization.to_app_model ][ id ]
data[ Organization.to_app_model ][ id ] = attributes_with_associations
if data[ Organization.to_app_model ][ id ]['member_ids']
data[ Organization.to_app_model ][ id ]['member_ids'].each {|user_id|
if !data[ User.to_app_model ][ user_id ]
user = User.lookup( id: user_id )
data = user.assets( data )

View file

@ -24,7 +24,7 @@ returns
# access ok if its own organization
return false if data[:type] != 'ro'
return false if !data[:current_user].organization_id
return true if self.id == data[:current_user].organization_id
return true if id == data[:current_user].organization_id
# no access
return false

View file

@ -50,7 +50,7 @@ returns
# add org member for search index data
attributes['member'] = []
users = User.where( organization_id: self.id )
users = User.where( organization_id: id )
users.each { |user|
attributes['member'].push user.search_index_data
}

View file

@ -13,10 +13,10 @@ class Package < ApplicationModel
def self.build(data)
if data[:file]
xml = self._read_file( data[:file], data[:root] || true )
package = self._parse(xml)
xml = _read_file( data[:file], data[:root] || true )
package = _parse(xml)
elsif data[:string]
package = self._parse( data[:string] )
package = _parse( data[:string] )
end
build_date = REXML::Element.new('build_date')
@ -28,7 +28,7 @@ class Package < ApplicationModel
package.root.insert_after( '//zpm/description', build_host )
package.elements.each('zpm/filelist/file') do |element|
location = element.attributes['location']
content = self._read_file( location, data[:root] )
content = _read_file( location, data[:root] )
base64 = Base64.encode64(content)
element.text = base64
end
@ -55,7 +55,7 @@ class Package < ApplicationModel
end
end
data.each {|file|
self.install( file: path + '/' + file )
install( file: path + '/' + file )
}
data
end
@ -178,10 +178,10 @@ class Package < ApplicationModel
# Package.install( :string => zpm_as_string )
def self.install(data)
if data[:file]
xml = self._read_file( data[:file], true )
package = self._parse(xml)
xml = _read_file( data[:file], true )
package = _parse(xml)
elsif data[:string]
package = self._parse( data[:string] )
package = _parse( data[:string] )
end
# package meta data
@ -207,7 +207,7 @@ class Package < ApplicationModel
end
# uninstall files of old package
self.uninstall(
uninstall(
name: package_db.name,
version: package_db.version,
migration_not_down: true,
@ -233,7 +233,7 @@ class Package < ApplicationModel
permission = element.attributes['permission'] || '644'
base64 = element.text
content = Base64.decode64(base64)
content = self._write_file(location, permission, content)
content = _write_file(location, permission, content)
end
# update package state
@ -258,8 +258,8 @@ class Package < ApplicationModel
fail "No such package '#{package_name}'"
end
file = self._get_bin( package.name, package.version )
self.install( string: file, reinstall: true )
file = _get_bin( package.name, package.version )
install( string: file, reinstall: true )
end
# Package.uninstall( :name => 'package', :version => '0.1.1' )
@ -267,10 +267,10 @@ class Package < ApplicationModel
def self.uninstall( data )
if data[:string]
package = self._parse( data[:string] )
package = _parse( data[:string] )
else
file = self._get_bin( data[:name], data[:version] )
package = self._parse(file)
file = _get_bin( data[:name], data[:version] )
package = _parse(file)
end
# package meta data
@ -289,7 +289,7 @@ class Package < ApplicationModel
permission = element.attributes['permission'] || '644'
base64 = element.text
content = Base64.decode64(base64)
content = self._delete_file(location, permission, content)
content = _delete_file(location, permission, content)
end
# prebuild assets

View file

@ -62,7 +62,7 @@ class RecentView < ApplicationModel
end
def self.list_full( user, limit = 10 )
recent_viewed = self.list( user, limit )
recent_viewed = list( user, limit )
# get related object
assets = ApplicationModel.assets_of_object_list(recent_viewed)
@ -75,7 +75,7 @@ class RecentView < ApplicationModel
def notify_clients
Sessions.send_to(
self.created_by_id,
created_by_id,
{
event: 'RecentView::changed',
data: {}

View file

@ -22,7 +22,7 @@ class Scheduler < ApplicationModel
jobs.each {|job|
next if jobs_started[ job.id ]
jobs_started[ job.id ] = true
self.start_job( job, runner, runner_count )
start_job( job, runner, runner_count )
}
sleep 90
end
@ -35,7 +35,7 @@ class Scheduler < ApplicationModel
Thread.new {
if job.period
loop do
self._start_job( job, runner, runner_count )
_start_job( job, runner, runner_count )
job = Scheduler.lookup( id: job.id )
# exit is job got deleted
@ -51,7 +51,7 @@ class Scheduler < ApplicationModel
sleep job.period
end
else
self._start_job( job, runner, runner_count )
_start_job( job, runner, runner_count )
end
# raise "Exception from thread"
job.pid = ''
@ -90,7 +90,7 @@ class Scheduler < ApplicationModel
# restart job again
if try_run_max > try_count
self._start_job( job, runner, runner_count, try_count, try_run_time)
_start_job( job, runner, runner_count, try_count, try_run_time)
else
raise "STOP thread for #{job.method} (runner #{runner} of #{runner_count} after #{try_count} tries"
end

View file

@ -47,7 +47,7 @@ class Setting < ApplicationModel
end
def self.get(name)
self.load
load
@@current[:settings_config][name]
end
@ -58,15 +58,15 @@ class Setting < ApplicationModel
end
def set_initial
self.state_initial = self.state
self.state_initial = state
end
def state_check
return if !(self.state || self.state == false)
return if !(state || state == false)
return if !( !self.state.respond_to?('has_key?') || !self.state.key?(:value) )
return if !( !state.respond_to?('has_key?') || !state.key?(:value) )
self.state = { value: self.state }
self.state = { value: state }
end
end

View file

@ -140,17 +140,17 @@ returns
end
def content
file = Store::File.find_by( id: self.store_file_id )
file = Store::File.find_by( id: store_file_id )
if !file
fail "No such file #{ self.store_file_id }!"
fail "No such file #{ store_file_id }!"
end
file.content
end
def provider
file = Store::File.find_by( id: self.store_file_id )
file = Store::File.find_by( id: store_file_id )
if !file
fail "No such file #{ self.store_file_id }!"
fail "No such file #{ store_file_id }!"
end
file.provider
end

View file

@ -17,7 +17,7 @@ class Store
if !adapter_name
fail 'Missing storage_provider setting option'
end
adapter = self.load_adapter( "Store::Provider::#{ adapter_name }" )
adapter = load_adapter( "Store::Provider::#{ adapter_name }" )
adapter.add( data, sha )
file = Store::File.create(
provider: adapter_name,
@ -29,12 +29,12 @@ class Store
# read content
def content
adapter = self.class.load_adapter("Store::Provider::#{ self.provider }")
if self.sha
c = adapter.get( self.sha )
adapter = self.class.load_adapter("Store::Provider::#{ provider }")
if sha
c = adapter.get( sha )
else
# fallback until migration is done
c = Store::Provider::DB.find_by( md5: self.md5 ).data
c = Store::Provider::DB.find_by( md5: md5 ).data
end
c
end
@ -86,8 +86,8 @@ class Store
private
def destroy_provider
adapter = self.class.load_adapter("Store::Provider::#{ self.provider }")
adapter.delete( self.sha )
adapter = self.class.load_adapter("Store::Provider::#{ provider }")
adapter.delete( sha )
end
end
end

View file

@ -11,10 +11,10 @@ class Tag < ApplicationModel
# lookups
if data[:object]
tag_object_id = self.tag_object_lookup( data[:object] )
tag_object_id = tag_object_lookup( data[:object] )
end
if data[:item]
tag_item_id = self.tag_item_lookup( data[:item] )
tag_item_id = tag_item_lookup( data[:item] )
end
# create history
@ -31,10 +31,10 @@ class Tag < ApplicationModel
# lookups
if data[:object]
tag_object_id = self.tag_object_lookup( data[:object] )
tag_object_id = tag_object_lookup( data[:object] )
end
if data[:item]
tag_item_id = self.tag_item_lookup( data[:item] )
tag_item_id = tag_item_lookup( data[:item] )
end
# create history
@ -48,14 +48,14 @@ class Tag < ApplicationModel
end
def self.tag_list( data )
tag_object_id_requested = self.tag_object_lookup( data[:object] )
tag_object_id_requested = tag_object_lookup( data[:object] )
tag_search = Tag.where(
tag_object_id: tag_object_id_requested,
o_id: data[:o_id],
)
tags = []
tag_search.each {|tag|
tags.push self.tag_item_lookup_id( tag.tag_item_id )
tags.push tag_item_lookup_id( tag.tag_item_id )
}
tags
end

View file

@ -76,7 +76,7 @@ returns
=end
def agent_of_group
Group.find( self.group_id ).users.where( active: true ).joins(:roles).where( 'roles.name' => Z_ROLENAME_AGENT, 'roles.active' => true ).uniq()
Group.find( group_id ).users.where( active: true ).joins(:roles).where( 'roles.name' => Z_ROLENAME_AGENT, 'roles.active' => true ).uniq()
end
=begin
@ -128,10 +128,10 @@ returns
def merge_to(data)
# update articles
Ticket::Article.where( ticket_id: self.id ).each(&:touch)
Ticket::Article.where( ticket_id: id ).each(&:touch)
# quiet update of reassign of articles
Ticket::Article.where( ticket_id: self.id ).update_all( ['ticket_id = ?', data[:ticket_id] ] )
Ticket::Article.where( ticket_id: id ).update_all( ['ticket_id = ?', data[:ticket_id] ] )
# touch new ticket (to broadcast change)
Ticket.find( data[:ticket_id] ).touch
@ -140,7 +140,7 @@ returns
# create new merge article
Ticket::Article.create(
ticket_id: self.id,
ticket_id: id,
type_id: Ticket::Article::Type.lookup( name: 'note' ).id,
sender_id: Ticket::Article::Sender.lookup( name: Z_ROLENAME_AGENT ).id,
body: 'merged',
@ -157,7 +157,7 @@ returns
link_object_source: 'Ticket',
link_object_source_value: data[:ticket_id],
link_object_target: 'Ticket',
link_object_target_value: self.id
link_object_target_value: id
)
# set state to 'merged'
@ -167,7 +167,7 @@ returns
self.owner_id = User.find_by( login: '-' ).id
# save ticket
self.save
save
end
=begin
@ -184,7 +184,7 @@ returns
=end
def online_notification_seen_state
state = Ticket::State.lookup( id: self.state_id )
state = Ticket::State.lookup( id: state_id )
state_type = Ticket::StateType.lookup( id: state.state_type_id )
return true if state_type.name == 'closed'
return true if state_type.name == 'merged'
@ -194,26 +194,26 @@ returns
private
def check_generate
return if self.number
return if number
self.number = Ticket::Number.generate
end
def check_title
return if !self.title
return if !title
self.title.gsub!(/\s|\t|\r/, ' ')
title.gsub!(/\s|\t|\r/, ' ')
end
def check_defaults
if !self.owner_id
if !owner_id
self.owner_id = 1
end
return if !self.customer_id
return if !customer_id
customer = User.find( self.customer_id )
return if self.organization_id == customer.organization_id
customer = User.find( customer_id )
return if organization_id == customer.organization_id
self.organization_id = customer.organization_id
end
@ -221,10 +221,10 @@ returns
def reset_pending_time
# ignore if no state has changed
return if !self.changes['state_id']
return if !changes['state_id']
# check if new state isn't pending*
current_state = Ticket::State.lookup( id: self.state_id )
current_state = Ticket::State.lookup( id: state_id )
current_state_type = Ticket::StateType.lookup( id: current_state.state_type_id )
# in case, set pending_time to nil
@ -236,10 +236,10 @@ returns
def destroy_dependencies
# delete articles
self.articles.destroy_all
articles.destroy_all
# destroy online notifications
OnlineNotification.remove( self.class.to_s, self.id )
OnlineNotification.remove( self.class.to_s, id )
end
end

View file

@ -31,7 +31,7 @@ returns
object: self.class.name,
group_id: self['group_id'],
role: role,
created_at: self.updated_at,
created_at: updated_at,
created_by_id: user_id,
)
end

View file

@ -31,9 +31,9 @@ class Ticket::Article < ApplicationModel
def check_subject
return if !self.subject
return if !subject
self.subject.gsub!(/\s|\t|\r/, ' ')
subject.gsub!(/\s|\t|\r/, ' ')
end
class Flag < ApplicationModel

View file

@ -25,14 +25,14 @@ returns
return if !self.class.activity_stream_support_config
role = self.class.activity_stream_support_config[:role]
ticket = Ticket.lookup( id: self.ticket_id )
ticket = Ticket.lookup( id: ticket_id )
ActivityStream.add(
o_id: self['id'],
type: type,
object: self.class.name,
group_id: ticket.group_id,
role: role,
created_at: self.updated_at,
created_at: updated_at,
created_by_id: user_id,
)
end

View file

@ -26,19 +26,19 @@ returns
if !data[ Ticket.to_app_model ]
data[ Ticket.to_app_model ] = {}
end
if !data[ Ticket.to_app_model ][ self.ticket_id ]
ticket = Ticket.find( self.ticket_id )
if !data[ Ticket.to_app_model ][ ticket_id ]
ticket = Ticket.find( ticket_id )
data = ticket.assets(data)
end
if !data[ Ticket::Article.to_app_model ]
data[ Ticket::Article.to_app_model ] = {}
end
if !data[ Ticket::Article.to_app_model ][ self.id ]
data[ Ticket::Article.to_app_model ][ self.id ] = self.attributes
if !data[ Ticket::Article.to_app_model ][ id ]
data[ Ticket::Article.to_app_model ][ id ] = attributes
# add attachment list to article
data[ Ticket::Article.to_app_model ][ self.id ]['attachments'] = self.attachments
data[ Ticket::Article.to_app_model ][ id ]['attachments'] = attachments
end
%w(created_by_id updated_by_id).each {|item|

View file

@ -26,8 +26,8 @@ returns
if !data[ Ticket.to_app_model ]
data[ Ticket.to_app_model ] = {}
end
if !data[ Ticket.to_app_model ][ self.id ]
data[ Ticket.to_app_model ][ self.id ] = self.attributes_with_associations
if !data[ Ticket.to_app_model ][ id ]
data[ Ticket.to_app_model ][ id ] = attributes_with_associations
end
%w(created_by_id updated_by_id owner_id customer_id).each {|item|
next if !self[ item ]

View file

@ -37,17 +37,17 @@ returns
def escalation_calculation
# set escalation off if ticket is already closed
state = Ticket::State.lookup( id: self.state_id )
state = Ticket::State.lookup( id: state_id )
if state.ignore_escalation?
# nothing to change
return true if !self.escalation_time
return true if !escalation_time
self.escalation_time = nil
# self.first_response_escal_date = nil
# self.close_time_escal_date = nil
self.callback_loop = true
self.save
save
return true
end
@ -58,13 +58,13 @@ returns
if !sla_selected
# nothing to change
return true if !self.escalation_time
return true if !escalation_time
self.escalation_time = nil
# self.first_response_escal_date = nil
# self.close_time_escal_date = nil
self.callback_loop = true
self.save
save
return true
end
@ -79,82 +79,82 @@ returns
if sla_selected.first_response_time
# get escalation date without pending time
self.first_response_escal_date = TimeCalculation.dest_time( self.created_at, sla_selected.first_response_time, sla_selected.data, sla_selected.timezone )
self.first_response_escal_date = TimeCalculation.dest_time( created_at, sla_selected.first_response_time, sla_selected.data, sla_selected.timezone )
# get pending time between created and first response escal. time
time_in_pending = escalation_suspend( self.created_at, self.first_response_escal_date, 'relative', sla_selected, sla_selected.first_response_time )
time_in_pending = escalation_suspend( created_at, first_response_escal_date, 'relative', sla_selected, sla_selected.first_response_time )
# get new escalation time (original escal_date + time_in_pending)
self.first_response_escal_date = TimeCalculation.dest_time( self.first_response_escal_date, time_in_pending.to_i, sla_selected.data, sla_selected.timezone )
self.first_response_escal_date = TimeCalculation.dest_time( first_response_escal_date, time_in_pending.to_i, sla_selected.data, sla_selected.timezone )
# set ticket escalation
self.escalation_time = calculation_higher_time( self.escalation_time, self.first_response_escal_date, self.first_response )
self.escalation_time = calculation_higher_time( escalation_time, first_response_escal_date, first_response )
end
if self.first_response# && !self.first_response_in_min
if first_response# && !self.first_response_in_min
# get response time in min between created and first response
self.first_response_in_min = escalation_suspend( self.created_at, self.first_response, 'real', sla_selected )
self.first_response_in_min = escalation_suspend( created_at, first_response, 'real', sla_selected )
end
# set time to show if sla is raised ot in
if sla_selected.first_response_time && self.first_response_in_min
self.first_response_diff_in_min = sla_selected.first_response_time - self.first_response_in_min
if sla_selected.first_response_time && first_response_in_min
self.first_response_diff_in_min = sla_selected.first_response_time - first_response_in_min
end
# update time
last_update = self.last_contact_agent
last_update = last_contact_agent
if !last_update
last_update = self.created_at
last_update = created_at
end
if sla_selected.update_time
self.update_time_escal_date = TimeCalculation.dest_time( last_update, sla_selected.update_time, sla_selected.data, sla_selected.timezone )
# get pending time between created and update escal. time
time_in_pending = escalation_suspend( last_update, self.update_time_escal_date, 'relative', sla_selected, sla_selected.update_time )
time_in_pending = escalation_suspend( last_update, update_time_escal_date, 'relative', sla_selected, sla_selected.update_time )
# get new escalation time (original escal_date + time_in_pending)
self.update_time_escal_date = TimeCalculation.dest_time( self.update_time_escal_date, time_in_pending.to_i, sla_selected.data, sla_selected.timezone )
self.update_time_escal_date = TimeCalculation.dest_time( update_time_escal_date, time_in_pending.to_i, sla_selected.data, sla_selected.timezone )
# set ticket escalation
self.escalation_time = calculation_higher_time( self.escalation_time, self.update_time_escal_date, false )
self.escalation_time = calculation_higher_time( escalation_time, update_time_escal_date, false )
end
if self.last_contact_agent
self.update_time_in_min = TimeCalculation.business_time_diff( self.created_at, self.last_contact_agent, sla_selected.data, sla_selected.timezone )
if last_contact_agent
self.update_time_in_min = TimeCalculation.business_time_diff( created_at, last_contact_agent, sla_selected.data, sla_selected.timezone )
end
# set sla time
if sla_selected.update_time && self.update_time_in_min
self.update_time_diff_in_min = sla_selected.update_time - self.update_time_in_min
if sla_selected.update_time && update_time_in_min
self.update_time_diff_in_min = sla_selected.update_time - update_time_in_min
end
# close time
if sla_selected.close_time
# get escalation date without pending time
self.close_time_escal_date = TimeCalculation.dest_time( self.created_at, sla_selected.close_time, sla_selected.data, sla_selected.timezone )
self.close_time_escal_date = TimeCalculation.dest_time( created_at, sla_selected.close_time, sla_selected.data, sla_selected.timezone )
# get pending time between created and close escal. time
extended_escalation = escalation_suspend( self.created_at, self.close_time_escal_date, 'relative', sla_selected, sla_selected.close_time )
extended_escalation = escalation_suspend( created_at, close_time_escal_date, 'relative', sla_selected, sla_selected.close_time )
# get new escalation time (original escal_date + time_in_pending)
self.close_time_escal_date = TimeCalculation.dest_time( self.close_time_escal_date, extended_escalation.to_i, sla_selected.data, sla_selected.timezone )
self.close_time_escal_date = TimeCalculation.dest_time( close_time_escal_date, extended_escalation.to_i, sla_selected.data, sla_selected.timezone )
# set ticket escalation
self.escalation_time = calculation_higher_time( self.escalation_time, self.close_time_escal_date, self.close_time )
self.escalation_time = calculation_higher_time( escalation_time, close_time_escal_date, close_time )
end
if self.close_time # && !self.close_time_in_min
self.close_time_in_min = escalation_suspend( self.created_at, self.close_time, 'real', sla_selected )
if close_time # && !self.close_time_in_min
self.close_time_in_min = escalation_suspend( created_at, close_time, 'real', sla_selected )
end
# set sla time
if sla_selected.close_time && self.close_time_in_min
self.close_time_diff_in_min = sla_selected.close_time - self.close_time_in_min
if sla_selected.close_time && close_time_in_min
self.close_time_diff_in_min = sla_selected.close_time - close_time_in_min
end
return if !self.changed?
self.callback_loop = true
self.save
save
end
=begin
@ -220,7 +220,7 @@ returns
total_time_without_pending = 0
total_time = 0
#get history for ticket
history_list = self.history_get
history_list = history_get
#loop through hist. changes and get time
last_state = nil

View file

@ -56,7 +56,7 @@ returns
def self.list (data)
overviews = self.all(data)
overviews = all(data)
return if !overviews
# build up attributes hash

View file

@ -21,11 +21,11 @@ returns
if data[:current_user].is_role('Customer')
# access ok if its own ticket
return true if self.customer_id == data[:current_user].id
return true if customer_id == data[:current_user].id
# access ok if its organization ticket
if data[:current_user].organization_id && self.organization_id
return true if self.organization_id == data[:current_user].organization_id
if data[:current_user].organization_id && organization_id
return true if organization_id == data[:current_user].organization_id
end
# no access
@ -35,7 +35,7 @@ returns
# check agent
# access if requestor is owner
return true if self.owner_id == data[:current_user].id
return true if owner_id == data[:current_user].id
# access if requestor is in group
data[:current_user].groups.each {|group|

View file

@ -32,7 +32,7 @@ returns
# for performance reasons, Model.search_index_reload will only collect if of object
# get whole data here
ticket = self.class.find(self.id)
ticket = self.class.find(id)
# remove ignored attributes
attributes = ticket.attributes
@ -42,7 +42,7 @@ returns
}
# add tags
tags = Tag.tag_list( object: 'Ticket', o_id: self.id )
tags = Tag.tag_list( object: 'Ticket', o_id: id )
if tags && !tags.empty?
attributes[:tag] = tags
end
@ -57,7 +57,7 @@ returns
attachment_max_size_in_mb = Setting.get('es_attachment_max_size_in_mb') || 40
# collect article data
articles = Ticket::Article.where( ticket_id: self.id )
articles = Ticket::Article.where( ticket_id: id )
attributes['articles'] = []
articles.each {|article|
article_attributes = article.attributes

View file

@ -47,7 +47,7 @@ returns:
def ignore_escalation?
ignore_escalation = %w(removed closed merged)
return true if ignore_escalation.include?( self.name )
return true if ignore_escalation.include?( name )
false
end
end

View file

@ -18,7 +18,7 @@ returns
def subject_build (subject)
# clena subject
subject = self.subject_clean(subject)
subject = subject_clean(subject)
ticket_hook = Setting.get('ticket_hook')
ticket_hook_divider = Setting.get('ticket_hook_divider')
@ -30,11 +30,11 @@ returns
# right position
if Setting.get('ticket_hook_position') == 'right'
return subject + " [#{ticket_hook}#{ticket_hook_divider}#{self.number}] "
return subject + " [#{ticket_hook}#{ticket_hook_divider}#{number}] "
end
# left position
"[#{ticket_hook}#{ticket_hook_divider}#{self.number}] " + subject
"[#{ticket_hook}#{ticket_hook_divider}#{number}] " + subject
end
=begin
@ -56,14 +56,14 @@ returns
ticket_subject_size = Setting.get('ticket_subject_size')
# remove all possible ticket hook formats with []
subject = subject.gsub(/\[#{ticket_hook}: #{self.number}\](\s+?|)/, '')
subject = subject.gsub(/\[#{ticket_hook}:#{self.number}\](\s+?|)/, '')
subject = subject.gsub(/\[#{ticket_hook}#{ticket_hook_divider}#{self.number}\](\s+?|)/, '')
subject = subject.gsub(/\[#{ticket_hook}: #{number}\](\s+?|)/, '')
subject = subject.gsub(/\[#{ticket_hook}:#{number}\](\s+?|)/, '')
subject = subject.gsub(/\[#{ticket_hook}#{ticket_hook_divider}#{number}\](\s+?|)/, '')
# remove all possible ticket hook formats without []
subject = subject.gsub(/#{ticket_hook}: #{self.number}(\s+?|)/, '')
subject = subject.gsub(/#{ticket_hook}:#{self.number}(\s+?|)/, '')
subject = subject.gsub(/#{ticket_hook}#{ticket_hook_divider}#{self.number}(\s+?|)/, '')
subject = subject.gsub(/#{ticket_hook}: #{number}(\s+?|)/, '')
subject = subject.gsub(/#{ticket_hook}:#{number}(\s+?|)/, '')
subject = subject.gsub(/#{ticket_hook}#{ticket_hook_divider}#{number}(\s+?|)/, '')
# remove leading "..:\s" and "..[\d+]:\s" e. g. "Re: " or "Re[5]: "
subject = subject.gsub(/^(..(\[\d+\])?:\s)+/, '')

View file

@ -32,7 +32,7 @@ class Token < ActiveRecord::Base
loop do
self.name = SecureRandom.hex(20)
break if !Token.exists?( name: self.name )
break if !Token.exists?( name: name )
end
end
end

View file

@ -170,11 +170,11 @@ translate strings in ruby context, e. g. for notifications
def set_initial
return if target_initial
self.target_initial = self.target
self.target_initial = target
end
def cache_clear
Cache.delete( 'Translation::' + self.locale.downcase )
Cache.delete( 'Translation::' + locale.downcase )
end
def self.cache_set(locale, data)
Cache.write( 'Translation::' + locale.downcase, data )

View file

@ -28,7 +28,7 @@ class TypeLookup < ApplicationModel
end
# create
lookup = self.create(
lookup = create(
name: name
)
@@cache_object[ name ] = lookup.id

View file

@ -87,17 +87,17 @@ returns
def fullname
fullname = ''
if self.firstname && !self.firstname.empty?
fullname = fullname + self.firstname
if firstname && !firstname.empty?
fullname = fullname + firstname
end
if self.lastname && !self.lastname.empty?
if lastname && !lastname.empty?
if fullname != ''
fullname = fullname + ' '
end
fullname = fullname + self.lastname
fullname = fullname + lastname
end
if fullname == '' && self.email
fullname = self.email
if fullname == '' && email
fullname = email
end
fullname
end
@ -116,7 +116,7 @@ returns
=end
def is_role( role_name )
self.roles.each { |role|
roles.each { |role|
return role if role.name == role_name
}
false
@ -250,7 +250,7 @@ returns
url = hash['info']['urls']['Website'] || hash['info']['urls']['Twitter'] || ''
end
roles = Role.where( name: 'Customer' )
self.create(
create(
login: hash['info']['nickname'] || hash['uid'],
firstname: hash['info']['name'],
email: hash['info']['email'],
@ -401,21 +401,21 @@ returns
self.login_failed = 0
# set updated by user
self.updated_by_id = self.id
self.updated_by_id = id
self.save
save
end
private
def check_name
if ( self.firstname && !self.firstname.empty? ) && ( !self.lastname || self.lastname.empty? )
if ( firstname && !firstname.empty? ) && ( !lastname || lastname.empty? )
# Lastname, Firstname
scan = self.firstname.scan(/, /)
scan = firstname.scan(/, /)
if scan[0]
name = self.firstname.split(', ', 2)
name = firstname.split(', ', 2)
if !name[0].nil?
self.lastname = name[0]
end
@ -426,7 +426,7 @@ returns
end
# Firstname Lastname
name = self.firstname.split(' ', 2)
name = firstname.split(' ', 2)
if !name[0].nil?
self.firstname = name[0]
end
@ -436,8 +436,8 @@ returns
return
# -no name- firstname.lastname@example.com
elsif ( !self.firstname || self.firstname.empty? ) && ( !self.lastname || self.lastname.empty? ) && ( self.email && !self.email.empty? )
scan = self.email.scan(/^(.+?)\.(.+?)\@.+?$/)
elsif ( !firstname || firstname.empty? ) && ( !lastname || lastname.empty? ) && ( email && !email.empty? )
scan = email.scan(/^(.+?)\.(.+?)\@.+?$/)
if scan[0]
if !scan[0][0].nil?
self.firstname = scan[0][0].capitalize
@ -451,34 +451,34 @@ returns
def check_email
return if !self.email
return if !email
self.email = self.email.downcase
self.email = email.downcase
end
def check_login
# use email as login if not given
if !self.login && self.email
self.login = self.email
if !login && email
self.login = email
end
# if email has changed, login is old email, change also login
if self.changes && self.changes['email']
if self.changes['email'][0] == self.login
self.login = self.email
if changes && changes['email']
if changes['email'][0] == login
self.login = email
end
end
# check if login already exists
return if !self.login
return if !login
self.login = self.login.downcase
self.login = login.downcase
check = true
while check
exists = User.find_by( login: self.login )
if exists && exists.id != self.id
self.login = self.login + rand(999).to_s
exists = User.find_by( login: login )
if exists && exists.id != id
self.login = login + rand(999).to_s
else
check = false
end
@ -487,49 +487,49 @@ returns
def avatar_check
return if !self.email
return if self.email.empty?
return if !email
return if email.empty?
# save/update avatar
avatar = Avatar.auto_detection(
object: 'User',
o_id: self.id,
url: self.email,
o_id: id,
url: email,
source: 'app',
updated_by_id: self.updated_by_id,
created_by_id: self.updated_by_id,
updated_by_id: updated_by_id,
created_by_id: updated_by_id,
)
# update user link
return if !avatar
self.update_column( :image, avatar.store_hash )
self.cache_delete
update_column( :image, avatar.store_hash )
cache_delete
end
def avatar_destroy
Avatar.remove( 'User', self.id )
Avatar.remove( 'User', id )
end
def check_password
# set old password again if not given
if self.password == '' || !self.password
if password == '' || !password
# get current record
if self.id
if id
#current = User.find(self.id)
#self.password = current.password
self.password = self.password_was
self.password = password_was
end
end
# crypt password if not already crypted
return if !self.password
return if self.password =~ /^\{sha2\}/
return if !password
return if password =~ /^\{sha2\}/
crypted = Digest::SHA2.hexdigest( self.password )
crypted = Digest::SHA2.hexdigest( password )
self.password = "{sha2}#{crypted}"
end
end

View file

@ -26,8 +26,8 @@ returns
if !data[ User.to_app_model ]
data[ User.to_app_model ] = {}
end
if !data[ User.to_app_model ][ self.id ]
attributes = self.attributes_with_associations
if !data[ User.to_app_model ][ id ]
attributes = attributes_with_associations
# do not transfer crypted pw
attributes['password'] = ''
@ -42,7 +42,7 @@ returns
}
end
data[ User.to_app_model ][ self.id ] = attributes
data[ User.to_app_model ][ id ] = attributes
# get roles
if attributes['role_ids']

View file

@ -22,7 +22,7 @@ returns
if data[:current_user].is_role(Z_ROLENAME_CUSTOMER)
# access ok if its own user
return true if self.id == data[:current_user].id
return true if id == data[:current_user].id
# no access
return false

View file

@ -22,7 +22,7 @@ returns
return if !adapter
# load backend
self.load_adapter(adapter)
load_adapter(adapter)
end
=begin

View file

@ -40,7 +40,7 @@ returns
next if !config_item[:adapter]
# load backend
backend = self.load_adapter( config_item[:adapter] )
backend = load_adapter( config_item[:adapter] )
next if !backend
user_auth = backend.check( username, password, config_item, user )

View file

@ -1,6 +1,6 @@
class Class
def to_app_model_url
camel_cased_word = self.to_s
camel_cased_word = to_s
camel_cased_word.gsub(/::/, '_')
.gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
.gsub(/([a-z\d])([A-Z])/, '\1_\2')
@ -9,7 +9,7 @@ class Class
end
def to_app_model
camel_cased_word = self.to_s
camel_cased_word = to_s
camel_cased_word.gsub(/::/, '').to_sym
end
end

View file

@ -1,6 +1,6 @@
class String
def message_quote
quote = self.split("\n")
quote = split("\n")
body_quote = ''
quote.each do |line|
body_quote = body_quote + '> ' + line + "\n"
@ -40,7 +40,7 @@ class String
# More details: http://pjambet.github.io/blog/emojis-and-mysql/
def utf8_to_3bytesutf8
return if ActiveRecord::Base.connection_config[:adapter] != 'mysql2'
self.each_char.select {|c|
each_char.select {|c|
if c.bytes.count > 3
Rails.logger.warn "strip out 4 bytes utf8 chars '#{c}' of '#{ self }'"
next

View file

@ -30,7 +30,7 @@ returns
def self.location(address)
# load backend
backend = self.load_adapter_by_setting( 'geo_ip_backend' )
backend = load_adapter_by_setting( 'geo_ip_backend' )
return if !backend
# db lookup

View file

@ -18,7 +18,7 @@ returns
def self.geocode(address)
# load backend
backend = self.load_adapter_by_setting( 'geo_location_backend' )
backend = load_adapter_by_setting( 'geo_location_backend' )
return if !backend
# db lookup
@ -40,7 +40,7 @@ returns
def self.reverse_geocode(lat, lng)
# load backend
backend = self.load_adapter_by_setting( 'geo_location_backend' )
backend = load_adapter_by_setting( 'geo_location_backend' )
return if !backend
# db lookup

View file

@ -207,7 +207,7 @@ module Import::OTRS
=end
def self.connection_test
self.request_json({})
request_json({})
end
=begin
@ -235,7 +235,7 @@ module Import::OTRS
end
# retrive statistic
statistic = self.request_json( { Subaction: 'List' }, 1)
statistic = request_json( { Subaction: 'List' }, 1)
if statistic
Cache.write('import_otrs_stats', statistic)
end
@ -264,7 +264,7 @@ module Import::OTRS
=end
def self.current_state
data = self.statistic
data = statistic
base = Group.count + Ticket::State.count + Ticket::Priority.count
base_total = data['Queue'] + data['State'] + data['Priority']
user = User.count
@ -415,7 +415,7 @@ module Import::OTRS
def self.diff_worker
return if !Setting.get('import_mode')
return if Setting.get('import_otrs_endpoint') == 'http://otrs_host/otrs'
self.diff
diff
end
def self.diff
@ -453,7 +453,7 @@ module Import::OTRS
organization(organizations)
# get changed tickets
self.ticket_diff
ticket_diff
end

View file

@ -56,7 +56,7 @@ returns
# send update to browser
if session && session['id']
self.send(
send(
client_id,
{
event: 'ws:login',
@ -110,7 +110,7 @@ returns
=end
def self.session_exists?(client_id)
client_ids = self.sessions
client_ids = sessions
client_ids.include? client_id.to_s
end
@ -146,10 +146,10 @@ returns
=end
def self.list
client_ids = self.sessions
client_ids = sessions
session_list = {}
client_ids.each { |client_id|
data = self.get(client_id)
data = get(client_id)
next if !data
session_list[client_id] = data
}
@ -210,7 +210,7 @@ returns
=end
def self.touch( client_id )
data = self.get(client_id)
data = get(client_id)
return false if !data
path = "#{@path}/#{client_id}"
data[:meta][:last_ping] = Time.new.to_i.to_s
@ -246,12 +246,12 @@ returns
session_file = "#{session_dir}/session"
data = nil
if !File.exist? session_dir
self.destory(client_id)
destory(client_id)
Rails.logger.error "missing session directory for '#{client_id}', remove session."
return
end
if !File.exist? session_file
self.destory(client_id)
destory(client_id)
Rails.logger.errror "missing session file for '#{client_id}', remove session."
return
end
@ -262,13 +262,13 @@ returns
file.flock( File::LOCK_UN )
data_json = JSON.parse( all )
if data_json
data = self.symbolize_keys(data_json)
data = symbolize_keys(data_json)
data[:user] = data_json['user'] # for compat. reasons
end
}
rescue Exception => e
Rails.logger.error e.inspect
self.destory(client_id)
destory(client_id)
Rails.logger.error "ERROR: reading session file '#{session_file}', remove session."
return
end
@ -327,7 +327,7 @@ returns
def self.send_to( user_id, data )
# list all current clients
client_list = self.sessions
client_list = sessions
client_list.each {|client_id|
session = Sessions.get(client_id)
next if !session
@ -354,7 +354,7 @@ returns
def self.broadcast( data )
# list all current clients
client_list = self.sessions
client_list = sessions
client_list.each {|client_id|
Sessions.send( client_id, data )
}
@ -508,7 +508,7 @@ returns
Thread.abort_on_exception = true
loop do
client_ids = self.sessions
client_ids = sessions
client_ids.each { |client_id|
# connection already open, ignore

View file

@ -35,13 +35,13 @@ class Sessions::Backend::ActivityStream
def push
# check timeout
timeout = Sessions::CacheIn.get( self.client_key )
timeout = Sessions::CacheIn.get( client_key )
return if timeout
# set new timeout
Sessions::CacheIn.set( self.client_key, true, { expires_in: @ttl.seconds } )
Sessions::CacheIn.set( client_key, true, { expires_in: @ttl.seconds } )
data = self.load
data = load
return if !data || data.empty?

View file

@ -5,7 +5,7 @@ class Sessions::Backend::Collections
@client = client
@client_id = client_id
@ttl = ttl
@backends = self.backend
@backends = backend
end
def push

View file

@ -42,11 +42,11 @@ class Sessions::Backend::Collections::Base
end
# check timeout
timeout = Sessions::CacheIn.get( self.client_key )
timeout = Sessions::CacheIn.get( client_key )
return if timeout
# set new timeout
Sessions::CacheIn.set( self.client_key, true, { expires_in: @ttl.seconds } )
Sessions::CacheIn.set( client_key, true, { expires_in: @ttl.seconds } )
# check if update has been done
last_change = self.class.model.constantize.latest_change
@ -54,7 +54,7 @@ class Sessions::Backend::Collections::Base
@last_change = last_change
# load current data
items = self.load
items = load
return if !items || items.empty?

View file

@ -16,14 +16,14 @@ class Sessions::Backend::Rss
def load
# check timeout
cache = Sessions::CacheIn.get( self.collection_key )
cache = Sessions::CacheIn.get( collection_key )
return cache if cache
url = 'http://www.heise.de/newsticker/heise-atom.xml'
rss_items = Rss.fetch( url, 8 )
# set new timeout
Sessions::CacheIn.set( self.collection_key, rss_items, { expires_in: 1.hours } )
Sessions::CacheIn.set( collection_key, rss_items, { expires_in: 1.hours } )
rss_items
end
@ -35,13 +35,13 @@ class Sessions::Backend::Rss
def push
# check timeout
timeout = Sessions::CacheIn.get( self.client_key )
timeout = Sessions::CacheIn.get( client_key )
return if timeout
# set new timeout
Sessions::CacheIn.set( self.client_key, true, { expires_in: @ttl.seconds } )
Sessions::CacheIn.set( client_key, true, { expires_in: @ttl.seconds } )
data = self.load
data = load
return if !data || data.empty?

View file

@ -33,13 +33,13 @@ class Sessions::Backend::TicketCreate
def push
# check timeout
timeout = Sessions::CacheIn.get( self.client_key )
timeout = Sessions::CacheIn.get( client_key )
return if timeout
# set new timeout
Sessions::CacheIn.set( self.client_key, true, { expires_in: @ttl.seconds } )
Sessions::CacheIn.set( client_key, true, { expires_in: @ttl.seconds } )
ticket_create_attributes = self.load
ticket_create_attributes = load
return if !ticket_create_attributes

View file

@ -34,10 +34,10 @@ class Sessions::Backend::TicketOverviewIndex
def push
# check check interval
return if Sessions::CacheIn.get( self.client_key )
return if Sessions::CacheIn.get( client_key )
# reset check interval
Sessions::CacheIn.set( self.client_key, true, { expires_in: @ttl.seconds } )
Sessions::CacheIn.set( client_key, true, { expires_in: @ttl.seconds } )
# check if min one ticket has changed
last_ticket_change = Ticket.latest_change
@ -45,7 +45,7 @@ class Sessions::Backend::TicketOverviewIndex
@last_ticket_change = last_ticket_change
# load current data
data = self.load
data = load
return if !data

View file

@ -46,11 +46,11 @@ class Sessions::Backend::TicketOverviewList
# check interval
Rails.logger.debug "LOG 1 #{@user.inspect}"
return if Sessions::CacheIn.get( self.client_key )
return if Sessions::CacheIn.get( client_key )
# reset check interval
Rails.logger.debug "LOG 2 #{@ttl.seconds}"
Sessions::CacheIn.set( self.client_key, true, { expires_in: @ttl.seconds } )
Sessions::CacheIn.set( client_key, true, { expires_in: @ttl.seconds } )
# check if min one ticket has changed
last_ticket_change = Ticket.latest_change
@ -59,7 +59,7 @@ class Sessions::Backend::TicketOverviewList
@last_ticket_change = last_ticket_change
# load current data
items = self.load
items = load
Rails.logger.debug "LOG 4 #{items.inspect}"
return if !items

View file

@ -47,7 +47,7 @@ module Sessions::CacheIn
def self.get( key, params = {} )
# puts 'CacheIn.get:' + key + '-' + @@data[ key ].inspect
return if self.expired( key, params )
return if expired( key, params )
@@data[ key ]
end
end

View file

@ -2,9 +2,9 @@ class Sessions::Client
def initialize( client_id )
@client_id = client_id
self.log '---client start ws connection---'
self.fetch
self.log '---client exiting ws connection---'
log '---client start ws connection---'
fetch
log '---client exiting ws connection---'
end
def fetch
@ -49,12 +49,12 @@ class Sessions::Client
end
loop_count += 1
self.log "---client - looking for data of user #{user.id}"
log "---client - looking for data of user #{user.id}"
# push messages from backends
backend_pool.each(&:push)
self.log '---/client-'
log '---/client-'
# start faster in the beginnig
if loop_count < 20

View file

@ -51,7 +51,7 @@ returns
next if !config_item[:adapter]
# load backend
backend = self.load_adapter( config_item[:adapter] )
backend = load_adapter( config_item[:adapter] )
next if !backend
user_auth = backend.check( params, config_item )