# TODO:
# - a mechanism to iterate through errors in reverse
# - async logging should queue; if duplicate stack traces are found in a batch,
#   the error should be merged into the previous one

class ErrorLog

  @lock = Mutex.new

  def self.filename
    "#{Rails.root}/log/#{Rails.env}_errors.log"
  end

  def self.clear!(guid)
    raise NotImplementedError
  end

  def self.clear_all!
    File.delete(ErrorLog.filename) if File.exist?(ErrorLog.filename)
  end

  # Fire-and-forget: report the error from a new thread so the current request
  # is not blocked by the log write.
  def self.report_async!(exception, controller, request, user)
    Thread.new do
      report!(exception, controller, request, user)
    end
  end
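
  # A usage sketch (hypothetical wiring, not part of this class): a controller
  # could report unhandled exceptions without blocking the response, e.g.
  #
  #   rescue_from StandardError do |e|
  #     ErrorLog.report_async!(e, self, request, current_user)
  #     raise e # re-raise so normal error handling still runs
  #   end
  #
  # Here `current_user` stands in for whatever the app uses for the signed-in
  # user; pass nil if there is none (user_id is guarded below).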

  def self.report!(exception, controller, request, user)
    add_row!(
      date: DateTime.now,
      guid: SecureRandom.uuid,
      user_id: user && user.id,
      parameters: request && request.filtered_parameters.to_json,
      action: controller.action_name,
      controller: controller.controller_name,
      backtrace: sanitize_backtrace(exception.backtrace).join("\n"),
      message: exception.message,
      url: "#{request.protocol}#{request.env["HTTP_X_FORWARDED_HOST"] || request.env["HTTP_HOST"]}#{request.fullpath}",
      exception_class: exception.class.to_s
    )
  end

  def self.add_row!(hash)
    data = hash.to_xml(skip_instruct: true)

    # Writes go through report_async!'s background thread because appending can
    # block if the log gets backed up; the mutex plus an exclusive flock keep
    # concurrent writers (threads and processes) from interleaving rows.
    @lock.synchronize do
      File.open(filename, "a") do |f|
        f.flock(File::LOCK_EX)
        f.write(data)
      end
    end
  end
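
  # For illustration: each row appended above is the Hash#to_xml serialization
  # of those attributes, rooted at <hash> (to_xml's default root). Roughly:
  #
  #   <hash>
  #     <guid>9b1deb4d-...</guid>
  #     <message>undefined method `foo' for nil:NilClass</message>
  #     ...
  #   </hash>
  #
  # The closing </hash> line is the record delimiter that `skip` below scans
  # for when reading the log back.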

  def self.each(&blk)
    skip(0, &blk)
  end
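
  # Read-side sketch: each record comes back as a Hash of element name => text,
  # so something like
  #
  #   ErrorLog.each { |row| puts "#{row["date"]} #{row["message"]}" }
  #
  # prints every logged error, and ErrorLog.skip(10) { |row| ... } starts
  # yielding only after the first ten records.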

  def self.skip(skip = 0)
    pos = 0
    return [] unless File.exist?(filename)

    loop do
      lines = ""

      # Read one record (up to the closing </hash>) under a shared lock,
      # remembering the file position so the next iteration resumes there.
      File.open(self.filename, "r") do |f|
        f.flock(File::LOCK_SH)
        f.pos = pos
        while !f.eof?
          line = f.readline
          lines << line
          break if line.starts_with? "</hash>"
        end
        pos = f.pos
      end

      if lines != "" && skip == 0
        h = {}
        e = Nokogiri.parse(lines).children[0]
        e.children.each do |inner|
          h[inner.name] = inner.text
        end
        yield h
      end

      skip -= 1 if skip > 0
      break if lines == ""
    end
  end

  # NOTE: a bare `private` does not apply to `def self.` methods, so the helper
  # is hidden explicitly with private_class_method below.
  def self.sanitize_backtrace(trace)
    re = Regexp.new(/^#{Regexp.escape(Rails.root.to_s)}/)
    trace.map { |line| Pathname.new(line.gsub(re, "[RAILS_ROOT]")).cleanpath.to_s }
  end
  private_class_method :sanitize_backtrace
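
  # For illustration (hypothetical paths): with Rails.root at /var/www/app, a
  # backtrace line like
  #
  #   /var/www/app/app/models/topic.rb:42:in `update'
  #
  # comes back as
  #
  #   [RAILS_ROOT]/app/models/topic.rb:42:in `update'
  #
  # while lines from gems or the Ruby stdlib are left essentially untouched
  # (cleanpath only normalizes them).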

end