require 'json'
require 'session_helper'

module Sessions

  # get application root directory
  @root = Dir.pwd.to_s
  if !@root || @root.empty? || @root == '/'
    @root = Rails.root
  end

  # get working directories
  @path = "#{@root}/tmp/websocket_#{Rails.env}"
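
  # On-disk layout implied by the path handling below:
  #   <@path>/<client_id>/session   - session data as JSON
  #   <@path>/<client_id>/send-*    - messages queued for that client
  #   <@path>/spool/                - spooled messages for replay after reconnect
  #   <@path>/tmp/<client_id>/      - staging area while a session is created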

  # create global vars for threads
  @@client_threads = {} # rubocop:disable Style/ClassVars

=begin

start new session

  Sessions.create(client_id, session_data, { type: 'websocket' })

returns

  true|false

=end

  def self.create(client_id, session, meta)
    path         = "#{@path}/#{client_id}"
    path_tmp     = "#{@path}/tmp/#{client_id}"
    session_file = "#{path_tmp}/session"

    # collect session data
    meta[:last_ping] = Time.now.utc.to_i
    data = {
      user: session,
      meta: meta,
    }
    content = data.to_json

    # store session data in session file
    FileUtils.mkpath path_tmp
    File.open(session_file, 'wb') { |file|
      file.write content
    }

    # destroy old session if needed
    if File.exist?(path)
      Sessions.destroy(client_id)
    end

    # move to destination directory
    FileUtils.mv(path_tmp, path)

    # send update to browser
    if session && session['id']
      send(
        client_id,
        {
          event: 'ws:login',
          data: { success: true },
        }
      )
    end
  end

=begin

list of all sessions

  client_ids = Sessions.sessions

returns

  ['4711', '4712']

=end

  def self.sessions
    path = "#{@path}/"

    # just make sure that the working directory exists
    if !File.exist?(path)
      FileUtils.mkpath path
    end

    data = []
    Dir.foreach(path) do |entry|
      next if entry == '.'
      next if entry == '..'
      next if entry == 'tmp'
      next if entry == 'spool'
      data.push entry.to_s
    end
    data
  end

=begin

check if a session exists

  Sessions.session_exists?(client_id)

returns

  true|false

=end

  def self.session_exists?(client_id)
    client_ids = sessions
    client_ids.include? client_id.to_s
  end

=begin

list of all sessions with data

  client_ids_with_data = Sessions.list

returns

  {
    '4711' => {
      user: {
        'id' => 123,
      },
      meta: {
        type: 'websocket',
        last_ping: time_of_last_ping,
      }
    },
    '4712' => {
      user: {
        'id' => 124,
      },
      meta: {
        type: 'ajax',
        last_ping: time_of_last_ping,
      }
    },
  }

=end

  def self.list
    client_ids = sessions
    session_list = {}
    client_ids.each { |client_id|
      data = get(client_id)
      next if !data
      session_list[client_id] = data
    }
    session_list
  end

=begin

destroy session

  Sessions.destroy(client_id)

returns

  true|false

=end

  def self.destroy(client_id)
    path = "#{@path}/#{client_id}"
    FileUtils.rm_rf path
  end

=begin

destroy idle sessions

  list_of_client_ids = Sessions.destroy_idle_sessions

returns

  ['4711', '4712']

=end

  def self.destroy_idle_sessions(idle_time_in_sec = 240)
    list_of_closed_sessions = []
    clients = Sessions.list
    clients.each { |client_id, client|
      if !client[:meta] || !client[:meta][:last_ping] || (client[:meta][:last_ping].to_i + idle_time_in_sec) < Time.now.utc.to_i
        list_of_closed_sessions.push client_id
        Sessions.destroy(client_id)
      end
    }
    list_of_closed_sessions
  end

=begin

touch session

  Sessions.touch(client_id)

returns

  true|false

=end

  def self.touch(client_id)
    data = get(client_id)
    return false if !data
    path = "#{@path}/#{client_id}"
    data[:meta][:last_ping] = Time.now.utc.to_i
    content = data.to_json
    File.open("#{path}/session", 'wb') { |file|
      file.write content
    }
    true
  end

=begin

get session data

  data = Sessions.get(client_id)

returns

  {
    user: {
      'id' => 123,
    },
    meta: {
      type: 'websocket',
      last_ping: time_of_last_ping,
    }
  }

=end

  def self.get(client_id)
    session_dir  = "#{@path}/#{client_id}"
    session_file = "#{session_dir}/session"
    data         = nil

    # if no session dir exists, the session got destroyed
    if !File.exist? session_dir
      destroy(client_id)
      log('debug', "missing session directory for '#{client_id}', remove session.")
      return
    end

    # if only the session file is missing, something went wrong
    if !File.exist? session_file
      destroy(client_id)
      log('error', "missing session file for '#{client_id}', remove session.")
      return
    end
    begin
      File.open(session_file, 'rb') { |file|
        file.flock(File::LOCK_EX)
        all = file.read
        file.flock(File::LOCK_UN)
        data_json = JSON.parse(all)
        if data_json
          data        = symbolize_keys(data_json)
          data[:user] = data_json['user'] # for compat. reasons
        end
      }
    rescue => e
      log('error', e.inspect)
      destroy(client_id)
      log('error', "error in reading/parsing session file '#{session_file}', remove session.")
      return
    end
    data
  end

=begin

send message to client

  Sessions.send(client_id_of_recipient, data)

returns

  true|false

=end

  def self.send(client_id, data)
    path     = "#{@path}/#{client_id}/"
    filename = "send-#{Time.now.utc.to_f}"
    location = "#{path}#{filename}"
    check    = true
    count    = 0
    while check
      if File.exist?(location)
        count += 1
        location = "#{path}#{filename}-#{count}"
      else
        check = false
      end
    end
    return false if !File.directory? path
    begin
      File.open(location, 'wb') { |file|
        file.flock(File::LOCK_EX)
        file.write data.to_json
        file.flock(File::LOCK_UN)
        file.close
      }
    rescue => e
      log('error', e.inspect)
      log('error', "error in writing message file '#{location}'")
      return false
    end
    true
  end

=begin

send message to all client sessions of a user

  Sessions.send_to(user_id, data)

returns

  true|false

=end

  def self.send_to(user_id, data)

    # list all current clients
    client_list = sessions
    client_list.each { |client_id|
      session = Sessions.get(client_id)
      next if !session
      next if !session[:user]
      next if !session[:user]['id']
      next if session[:user]['id'].to_i != user_id.to_i
      Sessions.send(client_id, data)
    }
    true
  end

=begin

send message to all authenticated clients

  Sessions.broadcast(data)

returns

  [array_with_client_ids_of_recipients]

broadcast also to clients that are not authenticated

  Sessions.broadcast(data, 'public') # public|authenticated

broadcast to everybody except the sender

  Sessions.broadcast(data, 'public', sender_user_id)

=end

  def self.broadcast(data, recipient = 'authenticated', sender_user_id = nil)

    # list all current clients
    recipients = []
    client_list = sessions
    client_list.each { |client_id|
      session = Sessions.get(client_id)
      next if !session

      if recipient != 'public'
        next if !session[:user]
        next if !session[:user]['id']
      end

      if sender_user_id
        next if session[:user] && session[:user]['id'] && session[:user]['id'].to_i == sender_user_id.to_i
      end
      Sessions.send(client_id, data)
      recipients.push client_id
    }
    recipients
  end

=begin

get messages for client

  messages = Sessions.queue(client_id_of_recipient)

returns

  [
    {
      key1 => 'some data of message 1',
      key2 => 'some data of message 1',
    },
    {
      key1 => 'some data of message 2',
      key2 => 'some data of message 2',
    },
  ]

=end

  def self.queue(client_id)
    path  = "#{@path}/#{client_id}/"
    data  = []
    files = []
    Dir.foreach(path) { |entry|
      next if entry == '.'
      next if entry == '..'
      files.push entry
    }
    files.sort.each { |entry|
      if /^send/ =~ entry
        data.push Sessions.queue_file_read(path, entry)
      end
    }
    data
  end
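
=begin

read and delete a single queued message file; internal helper used by Sessions.queue
(arguments below are illustrative, they are normally supplied by Sessions.queue)

  message = Sessions.queue_file_read("/path/to/websocket_dir/client_id/", 'send-1234567890.123456')

returns

  parsed message (Hash or Array)

=end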

  def self.queue_file_read(path, filename)
    file_old = "#{path}#{filename}"
    file_new = "#{path}a-#{filename}"
    FileUtils.mv(file_old, file_new)
    all = ''
    File.open(file_new, 'rb') { |file|
      all = file.read
    }
    File.delete(file_new)
    JSON.parse(all)
  end
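
=begin

remove the spool/ and tmp/ working directories below the websocket path

  Sessions.cleanup

=end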

  def self.cleanup
    path = "#{@path}/spool/"
    FileUtils.rm_rf path
    path = "#{@path}/tmp/"
    FileUtils.rm_rf path
  end
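
=begin

store a message in the spool directory so it can be replayed to reconnecting clients
(payload below is illustrative; Sessions.spool_list looks for 'event', 'data' and an
optional 'recipient' in it)

  Sessions.spool_create(event: 'broadcast', data: { some: 'payload' })

=end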

  def self.spool_create(data)
    msg = JSON.generate(data)
    path = "#{@path}/spool/"
    FileUtils.mkpath path
    file_path = "#{path}/#{Time.now.utc.to_f}-#{rand(99_999)}"
    File.open(file_path, 'wb') { |file|
      data = {
        msg: msg,
        timestamp: Time.now.utc.to_i,
      }
      file.write data.to_json
    }
  end
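
=begin

collect spooled messages newer than the given timestamp that are addressed to the given
user or to everybody; spool files older than 48 hours are deleted on the way
(example is illustrative)

  items = Sessions.spool_list(last_timestamp, current_user_id)

returns

  [
    { type: 'direct', message: { ... } },
    { type: 'broadcast', message: { ... } },
  ]

=end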

  def self.spool_list(timestamp, current_user_id)
    path = "#{@path}/spool/"
    FileUtils.mkpath path
    data      = []
    to_delete = []
    files     = []
    Dir.foreach(path) { |entry|
      next if entry == '.'
      next if entry == '..'
      files.push entry
    }
    files.sort.each { |entry|
      filename = "#{path}/#{entry}"
      next if !File.exist?(filename)
      File.open(filename, 'rb') { |file|
        all   = file.read
        spool = JSON.parse(all)
        begin
          message_parsed = JSON.parse(spool['msg'])
        rescue => e
          log('error', "can't parse spool message: #{spool['msg']}, #{e.inspect}")
          next
        end

        # ignore messages older than 48h
        if spool['timestamp'] + (2 * 86_400) < Time.now.utc.to_i
          to_delete.push "#{path}/#{entry}"
          next
        end

        # add spool attribute to push spool info to clients
        message_parsed['spool'] = true

        # only send messages newer than the given timestamp
        if !timestamp || timestamp < spool['timestamp']

          # spool to recipient list
          if message_parsed['recipient'] && message_parsed['recipient']['user_id']

            message_parsed['recipient']['user_id'].each { |user_id|

              next if current_user_id != user_id

              message = message_parsed
              if message_parsed['event'] == 'broadcast'
                message = message_parsed['data']
              end

              item = {
                type: 'direct',
                message: message,
              }
              data.push item
            }

          # spool to every client
          else
            message = message_parsed
            if message_parsed['event'] == 'broadcast'
              message = message_parsed['data']
            end
            item = {
              type: 'broadcast',
              message: message,
            }
            data.push item
          end
        end
      }
    }
    to_delete.each { |file|
      File.delete(file)
    }
    data
  end
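
=begin

dispatcher loop: watches all session directories and spawns one Sessions::Client worker
thread per authenticated session; blocks the calling thread and never returns

  Sessions.jobs

=end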

  def self.jobs

    # just make sure that the working directory exists
    if !File.exist?(@path)
      FileUtils.mkpath @path
    end

    Thread.abort_on_exception = true
    loop do
      client_ids = sessions
      client_ids.each { |client_id|

        # connection already open, ignore
        next if @@client_threads[client_id]

        # get current user
        session_data = Sessions.get(client_id)
        next if !session_data
        next if !session_data[:user]
        next if !session_data[:user]['id']
        user = User.lookup(id: session_data[:user]['id'])
        next if !user

        # start client thread
        next if @@client_threads[client_id]

        @@client_threads[client_id] = true
        @@client_threads[client_id] = Thread.new {
          thread_client(client_id)
          @@client_threads[client_id] = nil
          log('debug', "close client (#{client_id}) thread")
          ActiveRecord::Base.connection.close
        }
        sleep 0.5
      }

      # slow down the polling loop
      sleep 0.5
    end
  end

=begin

check if thread for client_id is running

  Sessions.thread_client_exists?(client_id)

returns

  thread

=end

  def self.thread_client_exists?(client_id)
    @@client_threads[client_id]
  end

=begin

start client thread for browser session

  Sessions.thread_client(client_id)

returns

  thread

=end

  def self.thread_client(client_id, try_count = 0, try_run_time = Time.now.utc)
    log('debug', "LOOP #{client_id} - #{try_count}")
    begin
      Sessions::Client.new(client_id)
    rescue => e
      log('error', "thread_client #{client_id} exited with error #{e.inspect}")
      log('error', e.backtrace.join("\n "))
      sleep 10
      begin
        ActiveRecord::Base.connection_pool.release_connection
      rescue => e
        log('error', "Can't reconnect to database #{e.inspect}")
      end

      try_run_max = 10
      try_count += 1

      # reset error counter if too old
      if try_run_time + (60 * 5) < Time.now.utc
        try_count = 0
      end
      try_run_time = Time.now.utc

      # restart job again
      if try_run_max > try_count
        thread_client(client_id, try_count, try_run_time)
      else
        raise "STOP thread_client for client #{client_id} after #{try_run_max} tries"
      end
    end
    log('debug', "/LOOP #{client_id} - #{try_count}")
  end
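
=begin

recursively convert the string keys of a hash (and of nested hashes) to symbols,
used when reading session files back from disk
(example is illustrative)

  Sessions.symbolize_keys('a' => { 'b' => 1 })
  # => { a: { b: 1 } }

=end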

  def self.symbolize_keys(hash)
    hash.each_with_object({}) { |(key, value), result|
      new_key = case key
                when String then key.to_sym
                else key
                end
      new_value = case value
                  when Hash then symbolize_keys(value)
                  else value
                  end
      result[new_key] = new_value
    }
  end
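
=begin

write a log line via Rails.logger if Rails is loaded, otherwise to stdout
(example is illustrative)

  Sessions.log('debug', 'some debug message')

=end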

  # we use it in rails and non rails context
  def self.log(level, message)
    if defined?(Rails)
      if level == 'debug'
        Rails.logger.debug message
      elsif level == 'notice'
        Rails.logger.notice message
      else
        Rails.logger.error message
      end
      return
    end
    puts "#{Time.now.utc.iso8601}:#{level} #{message}" # rubocop:disable Rails/Output
  end
end