Fixed issue #1183 - repetitive JSON errors in the production log.
This commit is contained in:
parent 88e3d9e574
commit 9e03183e9e
2 changed files with 22 additions and 17 deletions
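The errors came from unsynchronized file access: session, queue, and spool files were written and read with no locking, so a concurrent reader could pick up a half-written file and JSON.parse would then fail on the truncated content, flooding the production log. The fix threads flock calls through every writer (exclusive lock) and every reader (shared lock, or exclusive where the file is consumed). A minimal sketch of the pattern, assuming a hypothetical payload hash `data` and file path `path`:

    require 'json'

    # Writer: hold an exclusive lock so no reader sees a partial file.
    File.open(path, 'wb') { |file|
      file.flock(File::LOCK_EX)
      file.write data.to_json
      file.flock(File::LOCK_UN)
    }

    # Reader: a shared lock blocks until any in-flight write has finished.
    raw = ''
    File.open(path, 'rb') { |file|
      file.flock(File::LOCK_SH)
      raw = file.read
      file.flock(File::LOCK_UN)
    }
    data = JSON.parse(raw)

Note that flock is advisory: the scheme only works because every code path touching these files takes the matching lock, which is what the hunks below add one by one.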
@@ -214,9 +214,10 @@ returns
     return false if !data
     path = "#{@path}/#{client_id}"
     data[:meta][:last_ping] = Time.now.utc.to_i
-    content = data.to_json
     File.open("#{path}/session", 'wb' ) { |file|
-      file.write content
+      file.flock(File::LOCK_EX)
+      file.write data.to_json
+      file.flock(File::LOCK_UN)
     }
     true
   end
@@ -261,7 +262,7 @@ returns
     end
     begin
       File.open(session_file, 'rb') { |file|
-        file.flock(File::LOCK_EX)
+        file.flock(File::LOCK_SH)
         all = file.read
         file.flock(File::LOCK_UN)
         data_json = JSON.parse(all)
@@ -432,14 +433,15 @@ returns
   end

   def self.queue_file_read(path, filename)
-    file_old = "#{path}#{filename}"
-    file_new = "#{path}a-#{filename}"
-    FileUtils.mv(file_old, file_new)
+    location = "#{path}#{filename}"
     message = ''
-    File.open(file_new, 'rb') { |file|
+    File.open(location, 'rb') { |file|
+      file.flock(File::LOCK_EX)
       message = file.read
+      file.flock(File::LOCK_UN)
     }
-    File.delete(file_new)
+    File.delete(location)
+    return if message.blank?
     begin
       return JSON.parse(message)
     rescue => e
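Read as a whole, queue_file_read after this hunk no longer renames the file to an `a-` prefixed temporary before reading it; it reads in place under an exclusive lock, deletes the file, and returns early when the content is blank instead of feeding an empty string to JSON.parse. Reassembled from the hunk (the rescue body continues past the end of the hunk and is elided; blank? assumes ActiveSupport is loaded, as the surrounding code suggests):

    def self.queue_file_read(path, filename)
      location = "#{path}#{filename}"
      message = ''
      File.open(location, 'rb') { |file|
        file.flock(File::LOCK_EX)
        message = file.read
        file.flock(File::LOCK_UN)
      }
      File.delete(location)
      return if message.blank?
      begin
        return JSON.parse(message)
      rescue => e
        # rescue body elided; it continues beyond this hunk
      end
    end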
@@ -466,13 +468,15 @@ remove all session and spool messages
     msg = JSON.generate(data)
     path = "#{@path}/spool/"
     FileUtils.mkpath path
-    file_path = "#{path}/#{Time.now.utc.to_f}-#{rand(99_999)}"
-    File.open(file_path, 'wb') { |file|
     data = {
       msg: msg,
       timestamp: Time.now.utc.to_i,
     }
+    file_path = "#{path}/#{Time.now.utc.to_f}-#{rand(99_999)}"
+    File.open(file_path, 'wb') { |file|
+      file.flock(File::LOCK_EX)
       file.write data.to_json
+      file.flock(File::LOCK_UN)
     }
   end

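The spool writer's change is mostly a reordering: the payload hash used to be assembled inside the File.open block, after the file had already been created; now the hash and the file name are built first, and the file is opened only for a lock-protected write. The resulting tail of the method, reassembled from the hunk:

    data = {
      msg: msg,
      timestamp: Time.now.utc.to_i,
    }
    file_path = "#{path}/#{Time.now.utc.to_f}-#{rand(99_999)}"
    File.open(file_path, 'wb') { |file|
      file.flock(File::LOCK_EX)
      file.write data.to_json
      file.flock(File::LOCK_UN)
    }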
@@ -491,7 +495,9 @@ remove all session and spool messages
       filename = "#{path}/#{entry}"
       next if !File.exist?(filename)
       File.open(filename, 'rb') { |file|
+        file.flock(File::LOCK_SH)
         message = file.read
+        file.flock(File::LOCK_UN)
         begin
           spool = JSON.parse(message)
           message_parsed = JSON.parse(spool['msg'])
@@ -237,11 +237,10 @@ EventMachine.run {
         next if client[:disconnect]
         log 'debug', 'checking for data...', client_id
         begin
-          queue = Sessions.queue( client_id )
-          if queue && queue[0]
+          queue = Sessions.queue(client_id)
+          next if queue.blank?
           log 'notice', 'send data to client', client_id
           websocket_send(client_id, queue)
-          end
         rescue => e

           log 'error', 'problem:' + e.inspect, client_id
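This last hunk is in the second changed file, the EventMachine-based websocket server's polling loop. Besides tightening the spacing in the Sessions.queue call, it replaces the old `if queue && queue[0]` wrapper (and its stray end) with an early `next if queue.blank?`, so a poll that finds nothing skips the send path entirely. The branch after the change, reassembled from the hunk (the surrounding per-client loop and the closing end are not part of the hunk):

    begin
      queue = Sessions.queue(client_id)
      next if queue.blank?
      log 'notice', 'send data to client', client_id
      websocket_send(client_id, queue)
    rescue => e
      log 'error', 'problem:' + e.inspect, client_id
    end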