Refactor .csv_import
parent 7a7ab6a95a
commit 65d1ba133d
4 changed files with 92 additions and 178 deletions
@@ -48,220 +48,129 @@ returns
 =end
 
     def csv_import(data)
-      try = true
-      if data[:try] != 'true' && data[:try] != true
-        try = false
-      end
-      delete = false
-      if data[:delete] == true || data[:delete] == 'true'
-        delete = true
-      end
-
-      errors = []
-      if delete == true && @csv_delete_possible != true
-        errors.push "Delete is not possible for #{new.class}."
-        result = {
-          errors: errors,
-          try: try,
-          result: 'failed',
-        }
-        return result
-      end
-
-      if data[:file].present?
-        raise Exceptions::UnprocessableEntity, "No such file '#{data[:file]}'" if !File.exist?(data[:file])
-
-        begin
-          file = File.open(data[:file], 'r:UTF-8')
-          data[:string] = file.read
-        rescue => e
-          raise Exceptions::UnprocessableEntity, "Unable to read file '#{data[:file]}': #{e.inspect}"
-        end
-      end
-      if data[:string].blank?
-        errors.push "Unable to parse empty file/string for #{new.class}."
-        result = {
-          errors: errors,
-          try: try,
-          result: 'failed',
-        }
-        return result
-      end
-
-      rows = ::CSV.parse(data[:string], data[:parse_params])
-      header = rows.shift
-      if header.blank?
-        errors.push "Unable to parse file/string without header for #{new.class}."
-        result = {
-          errors: errors,
-          try: try,
-          result: 'failed',
-        }
-        return result
-      end
-      header.each do |item|
-        if item.respond_to?(:strip!)
-          item.strip!
-        end
-        next if !item.respond_to?(:downcase!)
-
-        item.downcase!
-      end
-
-      if rows[0].blank?
-        errors.push "No records found in file/string for #{new.class}."
-        result = {
-          errors: errors,
-          try: try,
-          result: 'failed',
-        }
-        return result
-      end
-
-      # check if min one lookup key exists
-      if header.count == (header - lookup_keys.map(&:to_s)).count
-        errors.push "No lookup column like #{lookup_keys.map(&:to_s).join(',')} for #{new.class} found."
-        result = {
-          errors: errors,
-          try: try,
-          result: 'failed',
-        }
-        return result
-      end
+      try = data[:try].to_s == 'true'
+      delete = data[:delete].to_s == 'true'
+
+      begin
+        data[:string] = File.read(data[:file]) if data[:file].present?
+      rescue Errno::ENOENT
+        raise Exceptions::UnprocessableEntity, "No such file '#{data[:file]}'"
+      rescue => e
+        raise Exceptions::UnprocessableEntity, "Unable to read file '#{data[:file]}': #{e.inspect}"
+      end
+
+      header, *rows = ::CSV.parse(data[:string], data[:parse_params])
+      header&.each { |column| column.try(:strip!) }
+      header&.each { |column| column.try(:downcase!) }
+
+      begin
+        raise "Delete is not possible for #{self}." if delete && !csv_delete_possible
+        raise "Unable to parse empty file/string for #{self}." if data[:string].blank?
+        raise "Unable to parse file/string without header for #{self}." if header.blank?
+        raise "No records found in file/string for #{self}." if rows.first.blank?
+        raise "No lookup column like #{lookup_keys.map(&:to_s).join(',')} for #{self} found." if (header & lookup_keys.map(&:to_s)).none?
+      rescue => e
+        return {
+          try: try,
+          result: 'failed',
+          errors: [e.message],
+        }
+      end
 
       # get payload based on csv
       payload = []
       rows.each do |row|
-        if row[0].blank? && row[1].blank?
-          payload_last = payload.last
-          row.each_with_index do |item, count|
-            next if item.blank?
-            next if header[count].nil?
-
-            if payload_last[header[count].to_sym].class != Array
-              payload_last[header[count].to_sym] = [payload_last[header[count].to_sym]]
-            end
-            payload_last[header[count].to_sym].push item.strip
-          end
-          next
+        if row.first(2).any?(&:present?)
+          payload.push(
+            header.zip(row).to_h
+                  .compact.transform_values(&:strip)
+                  .except(nil).transform_keys(&:to_sym)
+                  .except(*csv_attributes_ignored)
+                  .merge(data[:fixed_params] || {})
+          )
+        else
+          header.zip(row).to_h
+                .compact.transform_values(&:strip)
+                .except(nil).transform_keys(&:to_sym)
+                .each { |col, val| payload.last[col] = [*payload.last[col], val] }
         end
-        attributes = {}
-        row.each_with_index do |item, count|
-          next if !item
-          next if header[count].blank?
-          next if @csv_attributes_ignored&.include?(header[count].to_sym)
-
-          attributes[header[count].to_sym] = if item.respond_to?(:strip)
-                                               item.strip
-                                             else
-                                               item
-                                             end
-        end
-        data[:fixed_params]&.each do |key, value|
-          attributes[key] = value
-        end
-        payload.push attributes
       end
 
       stats = {
         created: 0,
         updated: 0,
-      }
+        deleted: (count if delete),
+      }.compact
 
       # delete
-      if delete == true
-        stats[:deleted] = self.count
-        if try == false
-          destroy_all
-        end
-      end
+      destroy_all if delete && !try
 
       # create or update records
-      csv_object_ids_ignored = @csv_object_ids_ignored || []
       records = []
-      line_count = 0
+      errors = []
 
-      Transaction.execute(disable_notification: true, bulk: true) do
-        payload.each do |attributes|
-          line_count += 1
-          record = nil
-          lookup_keys.each do |lookup_by|
-            next if attributes[lookup_by].blank?
-
-            record = if lookup_by.in?(%i[name])
-                       find_by("LOWER(#{lookup_by}) = ?", attributes[lookup_by].downcase)
-                     elsif lookup_by.in?(%i[email login])
-                       lookup(attributes.slice(lookup_by).transform_values!(&:downcase))
-                     else
-                       lookup(attributes.slice(lookup_by))
-                     end
-
-            break if record
-          end
-
-          if record.in?(records)
-            errors.push "Line #{line_count}: duplicate record found."
+      transaction do
+        payload.each.with_index do |attributes, i|
+          record = (lookup_keys & attributes.keys).lazy.map do |lookup_key|
+            params = attributes.slice(lookup_key)
+            params.transform_values!(&:downcase) if lookup_key.in?(%i[email login])
+            lookup(params)
+          end.detect(&:present?)
+
+          if record&.in?(records)
+            errors.push "Line #{i.next}: duplicate record found."
             next
           end
 
-          if attributes[:id].present? && !record
-            errors.push "Line #{line_count}: unknown record with id '#{attributes[:id]}' for #{new.class}."
+          if !record && attributes[:id].present?
+            errors.push "Line #{i.next}: unknown #{self} with id '#{attributes[:id]}'."
             next
           end
 
-          if record && csv_object_ids_ignored.include?(record.id)
-            errors.push "Line #{line_count}: unable to update record with id '#{attributes[:id]}' for #{new.class}."
+          if record&.id&.in?(csv_object_ids_ignored)
+            errors.push "Line #{i.next}: unable to update #{self} with id '#{attributes[:id]}'."
             next
           end
 
           begin
             clean_params = association_name_to_id_convert(attributes)
           rescue => e
-            errors.push "Line #{line_count}: #{e.message}"
+            errors.push "Line #{i.next}: #{e.message}"
             next
           end
 
           # create object
-          UserInfo.current_user_id = clean_params[:updated_by_id] || clean_params[:created_by_id]
-          if !record || delete == true
-            stats[:created] += 1
-            begin
-              csv_verify_attributes(clean_params)
-              clean_params = param_cleanup(clean_params)
-
-              if !UserInfo.current_user_id
-                clean_params[:created_by_id] = 1
-                clean_params[:updated_by_id] = 1
-              end
-              record = new(clean_params)
-              record.associations_from_param(attributes)
-              record.save!
-            rescue => e
-              errors.push "Line #{line_count}: Unable to create record - #{e.message}"
-              next
-            end
-          else
-            stats[:updated] += 1
-            begin
-              csv_verify_attributes(clean_params)
-              clean_params = param_cleanup(clean_params)
-
-              if !UserInfo.current_user_id
-                clean_params[:updated_by_id] = 1
-              end
-
-              record.with_lock do
-                record.associations_from_param(attributes)
-                clean_params.each do |key, value|
-                  record[key] = value
-                end
-                next if !record.changed?
-
-                record.save!
-              end
-            rescue => e
-              errors.push "Line #{line_count}: Unable to update record - #{e.message}"
-              next
-            end
-          end
+          Transaction.execute(disable_notification: true, reset_user_id: true, bulk: true) do
+            UserInfo.current_user_id = clean_params[:updated_by_id] || clean_params[:created_by_id]
+
+            if !record || delete == true
+              stats[:created] += 1
+              begin
+                csv_verify_attributes(clean_params)
+
+                record = new(param_cleanup(clean_params).reverse_merge(created_by_id: 1, updated_by_id: 1))
+                record.associations_from_param(attributes)
+                record.save!
+              rescue => e
+                errors.push "Line #{i.next}: Unable to create record - #{e.message}"
+                next
+              end
+            else
+              stats[:updated] += 1
+
+              begin
+                csv_verify_attributes(clean_params)
+                clean_params = param_cleanup(clean_params).reverse_merge(updated_by_id: 1)
+
+                record.with_lock do
+                  record.associations_from_param(attributes)
+                  record.assign_attributes(clean_params)
+                  record.save! if record.changed?
+                end
+              rescue => e
+                errors.push "Line #{i.next}: Unable to update record - #{e.message}"
+                next
+              end
+            end
+          end
 
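The payload-building loop in the hunk above replaces the old per-cell bookkeeping with a single method chain over header.zip(row). The standalone sketch below shows what that chain produces for one CSV row; the header, row, ignored columns and fixed_params values are made-up stand-ins for what the concern derives from the parsed CSV and its class macros, and ActiveSupport is assumed to be available, as it is in the Rails app this code lives in.

# Illustrative only: mirrors the new payload construction from the hunk above.
require 'active_support/all' # provides Hash#except on older Rubies, as in the app

header = ['id', 'name', 'note']                   # already stripped/downcased by csv_import
row    = ['42', ' Zammad Foundation ', 'drop me']

ignored      = [:note]            # stand-in for csv_attributes_ignored
fixed_params = { active: true }   # stand-in for data[:fixed_params]

attributes = header.zip(row).to_h              # pair each header column with its cell
                   .compact                    # drop cells whose value is nil
                   .transform_values(&:strip)  # trim surrounding whitespace
                   .except(nil)                # drop cells whose header column is empty
                   .transform_keys(&:to_sym)
                   .except(*ignored)           # honour csv_attributes_ignored
                   .merge(fixed_params)        # fixed params override CSV values

p attributes
# => {:id=>"42", :name=>"Zammad Foundation", :active=>true}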
@@ -324,7 +233,6 @@ returns
 
     def csv_example(params = {})
       header = []
-      csv_object_ids_ignored = @csv_object_ids_ignored || []
       records = where.not(id: csv_object_ids_ignored).offset(1).limit(23).to_a
       if records.count < 20
         record_ids = records.pluck(:id).concat(csv_object_ids_ignored)
@@ -338,7 +246,7 @@ returns
       record_attributes_with_association_names.each do |key, value|
         next if value.class == ActiveSupport::HashWithIndifferentAccess
         next if value.class == Hash
-        next if @csv_attributes_ignored&.include?(key.to_sym)
+        next if csv_attributes_ignored&.include?(key.to_sym)
         next if key.match?(/_id$/)
         next if key.match?(/_ids$/)
         next if key == 'created_by'
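In the csv_import hunk further up, per-row record lookup is rewritten as a lazy map over the lookup keys that actually appear in the row attributes, stopping at the first key that resolves to a record. Below is a self-contained sketch of that idiom; LOOKUP_KEYS, the in-memory RECORDS table and the lookup method are hypothetical stand-ins for the model's real lookup_keys and its ActiveRecord-backed lookup.

# Illustrative only: the lazy lookup idiom from the csv_import hunk above.
require 'active_support/all' # for in? / present?, as in the app

LOOKUP_KEYS = %i[id number name email login].freeze

RECORDS = [
  { id: 1, login: 'admin@example.com', name: 'Admin' },
  { id: 2, login: 'agent@example.com', name: 'Agent' },
].freeze

# stand-in for the model's lookup class method
def lookup(params)
  RECORDS.find { |r| params.all? { |k, v| r[k].to_s == v.to_s } }
end

def find_record(attributes)
  (LOOKUP_KEYS & attributes.keys).lazy.map do |lookup_key|
    params = attributes.slice(lookup_key)
    # email/login lookups are case-insensitive in the concern
    params.transform_values!(&:downcase) if lookup_key.in?(%i[email login])
    lookup(params)
  end.detect(&:present?) # stops at the first key whose lookup returns a record
end

p find_record(login: 'Agent@example.com', name: 'Someone else')
# finds the 'Agent' record via its login, case-insensitively (name alone would not have matched)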
@@ -405,6 +313,8 @@ end
 =end
 
     def csv_object_ids_ignored(*object_ids)
+      return @csv_object_ids_ignored || [] if object_ids.empty?
+
       @csv_object_ids_ignored = object_ids
     end
 
@@ -428,6 +338,8 @@ end
 =end
 
     def csv_attributes_ignored(*attributes)
+      return @csv_attributes_ignored || [] if attributes.empty?
+
       @csv_attributes_ignored = attributes
     end
 
@@ -443,8 +355,10 @@ end
 
 =end
 
-    def csv_delete_possible(value)
-      @csv_delete_possible = value
+    def csv_delete_possible(*value)
+      return @csv_delete_possible if value.empty?
+
+      @csv_delete_possible = value.first
     end
   end
 end
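The three class-level macros in the hunks above now act as setters when called with arguments and as getters (with a default) when called without. The snippet below repeats those definitions inside a throwaway module so the behaviour can be tried outside the application; CsvImportMacros and ExampleModel are hypothetical names, only the method bodies come from the diff.

# Illustrative only: the dual setter/getter macros, exercised standalone.
module CsvImportMacros
  def csv_object_ids_ignored(*object_ids)
    return @csv_object_ids_ignored || [] if object_ids.empty?

    @csv_object_ids_ignored = object_ids
  end

  def csv_attributes_ignored(*attributes)
    return @csv_attributes_ignored || [] if attributes.empty?

    @csv_attributes_ignored = attributes
  end

  def csv_delete_possible(*value)
    return @csv_delete_possible if value.empty?

    @csv_delete_possible = value.first
  end
end

class ExampleModel
  extend CsvImportMacros

  csv_object_ids_ignored 1, 2, 3                  # records an import must never touch
  csv_attributes_ignored :created_at, :updated_at # columns dropped from the payload
  csv_delete_possible true                        # allow "delete all, then import" mode
end

ExampleModel.csv_object_ids_ignored  # => [1, 2, 3]
ExampleModel.csv_attributes_ignored  # => [:created_at, :updated_at]
ExampleModel.csv_delete_possible     # => true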
@@ -121,7 +121,7 @@ class OrganizationCsvImportTest < ActiveSupport::TestCase
     assert_equal(true, result[:try])
     assert_equal(1, result[:errors].count)
     assert_equal('failed', result[:result])
-    assert_equal("Line 1: unknown record with id '999999999' for Organization.", result[:errors][0])
+    assert_equal("Line 1: unknown Organization with id '999999999'.", result[:errors][0])
 
     assert_nil(Organization.find_by(name: 'organization-simple-invalid_id-import1'))
     assert_nil(Organization.find_by(name: 'organization-simple-invalid_id-import2'))
@@ -128,7 +128,7 @@ class TicketCsvImportTest < ActiveSupport::TestCase
     assert_equal(true, result[:try])
     assert_equal(1, result[:errors].count)
     assert_equal('failed', result[:result])
-    assert_equal("Line 1: unknown record with id '999999999' for Ticket.", result[:errors][0])
+    assert_equal("Line 1: unknown Ticket with id '999999999'.", result[:errors][0])
 
     assert_nil(Ticket.find_by(number: '123456'))
     assert_nil(Ticket.find_by(number: '123457'))
@@ -195,7 +195,7 @@ class UserCsvImportTest < ActiveSupport::TestCase
     assert_equal(true, result[:try])
     assert_equal(1, result[:errors].count)
     assert_equal('failed', result[:result])
-    assert_equal("Line 1: unknown record with id '999999999' for User.", result[:errors][0])
+    assert_equal("Line 1: unknown User with id '999999999'.", result[:errors][0])
 
     assert_nil(User.find_by(login: 'user-simple-invalid_id-import1'))
     assert_nil(User.find_by(login: 'user-simple-invalid_id-import2'))
@@ -230,7 +230,7 @@ class UserCsvImportTest < ActiveSupport::TestCase
     assert_equal(true, result[:try])
     assert_equal(1, result[:errors].count)
     assert_equal('failed', result[:result])
-    assert_equal("Line 1: unable to update record with id '1' for User.", result[:errors][0])
+    assert_equal("Line 1: unable to update User with id '1'.", result[:errors][0])
 
     assert_nil(User.find_by(login: 'user-simple-readonly_id-import1'))
     assert_nil(User.find_by(login: 'user-simple-readonly_id-import2'))