Fixed issue #599 - Elasticsearch's mapper-attachments plugin has been deprecated, use ingest-attachment now.
parent f62225dbde
commit 5ba83da6f5
6 changed files with 223 additions and 42 deletions
@@ -40,7 +40,7 @@ returns
     article_attributes = article.search_index_attribute_lookup

     # remove note needed attributes
-    ignore = %w(message_id_md5 ticket)
+    ignore = %w[message_id_md5 ticket]
     ignore.each do |attribute|
       article_attributes.delete(attribute)
     end

@@ -51,10 +51,8 @@ returns
     end

     # lookup attachments
+    article_attributes['attachment'] = []
     article.attachments.each do |attachment|
-      if !article_attributes['attachment']
-        article_attributes['attachment'] = []
-      end

       # check file size
       next if !attachment.content

@@ -70,7 +68,7 @@ returns

       data = {
         '_name' => attachment.filename,
-        '_content' => Base64.encode64(attachment.content)
+        '_content' => Base64.encode64(attachment.content).delete("\n")
       }
       article_attributes['attachment'].push data
     end

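Background on the .delete("\n") above: Ruby's Base64.encode64 wraps its output with a newline every 60 characters and appends a trailing one, while the ingest-attachment pipeline introduced below expects a plain base64 string in the attachment field. A minimal illustration, not part of the commit; the sample content is an assumption, and Base64.strict_encode64 would be an equivalent way to get newline-free output:

  require 'base64'

  content = "some normal text66\n"                  # assumed sample payload
  Base64.encode64(content)                          # => "c29tZSBub3JtYWwgdGV4dDY2Cg==\n"
  Base64.encode64(content).delete("\n")             # => "c29tZSBub3JtYWwgdGV4dDY2Cg=="
  Base64.strict_encode64(content)                   # => "c29tZSBub3JtYWwgdGV4dDY2Cg=="
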
db/migrate/20171203000001_setting_es_pipeline.rb  (new file, +16)
@@ -0,0 +1,16 @@
+class SettingEsPipeline < ActiveRecord::Migration[5.1]
+  def up
+
+    # return if it's a new setup
+    return if !Setting.find_by(name: 'system_init_done')
+    Setting.create_if_not_exists(
+      title: 'Elasticsearch Pipeline Name',
+      name: 'es_pipeline',
+      area: 'SearchIndex::Elasticsearch',
+      description: 'Define pipeline name for Elasticsearch.',
+      state: '',
+      preferences: { online_service_disable: true },
+      frontend: false
+    )
+  end
+end

@@ -2286,6 +2286,15 @@ Setting.create_if_not_exists(
   preferences: { online_service_disable: true },
   frontend: false
 )
+Setting.create_if_not_exists(
+  title: 'Elasticsearch Pipeline Name',
+  name: 'es_pipeline',
+  area: 'SearchIndex::Elasticsearch',
+  description: 'Define pipeline name for Elasticsearch.',
+  state: '',
+  preferences: { online_service_disable: true },
+  frontend: false
+)

 Setting.create_if_not_exists(
   title: 'Import Mode',

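Both the migration (for upgraded installations) and db/seeds.rb (for fresh setups) register the same es_pipeline setting, so the rest of the code can read it uniformly through Setting.get. A short illustration of the round trip, illustrative only and not part of the commit:

  Setting.get('es_pipeline')                        # => '' until rake searchindex:create picks ingest-attachment
  Setting.set('es_pipeline', 'zammad-attachment')
  Setting.get('es_pipeline')                        # => 'zammad-attachment'
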
@@ -4,6 +4,102 @@ class SearchIndexBackend

+=begin
+
+info about used search index machine
+
+  SearchIndexBackend.info
+
+=end
+
+  def self.info
+    url = Setting.get('es_url').to_s
+    Rails.logger.info "# curl -X GET \"#{url}\""
+    response = UserAgent.get(
+      url,
+      {},
+      {
+        json: true,
+        open_timeout: 8,
+        read_timeout: 12,
+        user: Setting.get('es_user'),
+        password: Setting.get('es_password'),
+      }
+    )
+    Rails.logger.info "# #{response.code}"
+    raise "Unable to process GET at #{url}\n#{response.inspect}" if !response.success?
+    response.data
+  end
+
+=begin
+
+update processors
+
+  SearchIndexBackend.processors(
+    _ingest/pipeline/attachment: {
+      description: 'Extract attachment information from arrays',
+      processors: [
+        {
+          foreach: {
+            field: 'ticket.articles.attachments',
+            processor: {
+              attachment: {
+                target_field: '_ingest._value.attachment',
+                field: '_ingest._value.data'
+              }
+            }
+          }
+        }
+      ]
+    }
+  )
+
+=end
+
+  def self.processors(data)
+    data.each do |key, items|
+      url = "#{Setting.get('es_url')}/#{key}"
+
+      items.each do |item|
+        if item[:action] == 'delete'
+          #Rails.logger.info "# curl -X DELETE \"#{url}\""
+          #response = UserAgent.delete(
+          #  url,
+          #  {
+          #    json: true,
+          #    open_timeout: 8,
+          #    read_timeout: 12,
+          #    user: Setting.get('es_user'),
+          #    password: Setting.get('es_password'),
+          #  }
+          #)
+          #Rails.logger.info "# #{response.code}"
+          #next if response.success?
+          #raise "Unable to process DELETE at #{url}\n#{response.inspect}"
+          next
+        end
+        Rails.logger.info "# curl -X PUT \"#{url}\" \\"
+        Rails.logger.debug "-d '#{data.to_json}'"
+        item.delete(:action)
+        response = UserAgent.put(
+          url,
+          item,
+          {
+            json: true,
+            open_timeout: 8,
+            read_timeout: 12,
+            user: Setting.get('es_user'),
+            password: Setting.get('es_password'),
+          }
+        )
+        Rails.logger.info "# #{response.code}"
+        next if response.success?
+        raise "Unable to process PUT at #{url}\n#{response.inspect}"
+      end
+    end
+    true
+  end
+
 =begin

 create/update/delete index

   SearchIndexBackend.index(

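In short: SearchIndexBackend.info fetches the cluster info (including the Elasticsearch version) with a GET on es_url, and SearchIndexBackend.processors issues one PUT per pipeline item against the given _ingest/pipeline path. A condensed usage sketch mirroring the rake task further down, illustrative and not part of the commit; the version string is an example value:

  number = SearchIndexBackend.info['version']['number'].to_s   # e.g. "5.6.4"

  SearchIndexBackend.processors(
    '_ingest/pipeline/zammad-attachment': [
      { action: 'delete' },
      {
        action: 'create',
        description: 'Extract zammad-attachment information from arrays',
        processors: [],   # full foreach/attachment chain omitted here, see the rake task below
      },
    ]
  )
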
@@ -51,8 +147,8 @@ create/update/delete index
       data[:data],
       {
         json: true,
-        open_timeout: 5,
-        read_timeout: 20,
+        open_timeout: 8,
+        read_timeout: 12,
         user: Setting.get('es_user'),
         password: Setting.get('es_password'),
       }

@@ -83,8 +179,8 @@ add new object to search index
       data,
       {
         json: true,
-        open_timeout: 5,
-        read_timeout: 20,
+        open_timeout: 8,
+        read_timeout: 16,
         user: Setting.get('es_user'),
         password: Setting.get('es_password'),
       }

@@ -113,8 +209,8 @@ remove whole data from index
     response = UserAgent.delete(
       url,
       {
-        open_timeout: 5,
-        read_timeout: 14,
+        open_timeout: 8,
+        read_timeout: 16,
         user: Setting.get('es_user'),
         password: Setting.get('es_password'),
       }

@@ -166,7 +262,7 @@ return search result
   def self.search_by_index(query, limit = 10, index = nil, query_extention = {})
     return [] if !query

-    url = build_url()
+    url = build_url
     return if !url
     url += if index
              if index.class == Array

@@ -201,7 +297,7 @@ return search result
     # add * on simple query like "somephrase23" or "attribute: somephrase23"
     if query.present?
       query.strip!
-      if query =~ /^([[:alpha:],0-9]+|[[:alpha:],0-9]+\:\s+[[:alpha:],0-9]+)$/
+      if query.match?(/^([[:alpha:],0-9]+|[[:alpha:],0-9]+\:\s+[[:alpha:],0-9]+)$/)
         query += '*'
       end
     end

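String#match? (Ruby 2.4+) only returns true or false and does not populate the global match data, which is all this condition needs; the matching behaviour itself is unchanged compared to =~. Quick illustration, not part of the commit:

  'ticket 23' =~ /\d+/        # => 7, also sets $~
  'ticket 23'.match?(/\d+/)   # => true, no match data allocated
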
@@ -294,7 +390,7 @@ get count of tickets and tickets which match on selector
   def self.selectors(index = nil, selectors = nil, limit = 10, current_user = nil, aggs_interval = nil)
     raise 'no selectors given' if !selectors

-    url = build_url()
+    url = build_url
     return if !url
     url += if index
              if index.class == Array

@@ -345,7 +441,7 @@ get count of tickets and tickets which match on selector
   def self.selector2query(selector, _current_user, aggs_interval, limit)
     query_must = []
     query_must_not = []
-    if selector && !selector.empty?
+    if selector.present?
       selector.each do |key, data|
         key_tmp = key.sub(/^.+?\./, '')
         t = {}

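selector.present? is ActiveSupport shorthand for "not nil and not empty", so it covers both halves of the replaced condition. Illustration, not part of the commit:

  nil.present?            # => false
  {}.present?             # => false
  { 'a' => 1 }.present?   # => true
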
@@ -439,10 +535,14 @@ return true if backend is configured
     index = "#{Setting.get('es_index')}_#{Rails.env}"
     url = Setting.get('es_url')
     url = if type
+            url_pipline = Setting.get('es_pipeline')
+            if url_pipline.present?
+              url_pipline = "?pipeline=#{url_pipline}"
+            end
             if o_id
-              "#{url}/#{index}/#{type}/#{o_id}"
+              "#{url}/#{index}/#{type}/#{o_id}#{url_pipline}"
             else
-              "#{url}/#{index}/#{type}"
+              "#{url}/#{index}/#{type}#{url_pipline}"
             end
           else
             "#{url}/#{index}"

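With es_pipeline set, every per-record index URL gains a ?pipeline= query parameter, so Elasticsearch runs the ingest pipeline on each document it indexes. A rough sketch of the resulting URL, with assumed example values for host, index name and environment:

  url = "#{Setting.get('es_url')}/#{Setting.get('es_index')}_#{Rails.env}/Ticket/123"
  url += "?pipeline=#{Setting.get('es_pipeline')}" if Setting.get('es_pipeline').present?
  # => "http://localhost:9200/zammad_production/Ticket/123?pipeline=zammad-attachment" (assumed values)
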
@@ -14,29 +14,87 @@ namespace :searchindex do

   task :create, [:opts] => :environment do |_t, _args|

-    # create indexes
-    puts 'create indexes...'
-    SearchIndexBackend.index(
-      action: 'create',
-      data: {
-        mappings: {
-          Ticket: {
-            _source: { excludes: [ 'article.attachment' ] },
-            properties: {
-              article: {
-                type: 'nested',
-                include_in_parent: true,
-                properties: {
-                  attachment: {
-                    type: 'attachment',
-                  }
-                }
-              }
-            }
-          }
-        }
-      }
-    )
+    # es with mapper-attachments plugin
+    number = SearchIndexBackend.info['version']['number'].to_s
+    if number =~ /^[2-4]\./ || number =~ /^5\.[0-5]\./
+
+      # create indexes
+      puts 'create indexes...'
+      SearchIndexBackend.index(
+        action: 'create',
+        data: {
+          mappings: {
+            Ticket: {
+              _source: { excludes: [ 'article.attachment' ] },
+              properties: {
+                article: {
+                  type: 'nested',
+                  include_in_parent: true,
+                  properties: {
+                    attachment: {
+                      type: 'attachment',
+                    }
+                  }
+                }
+              }
+            }
+          }
+        }
+      )
+      Setting.set('es_pipeline', '')
+
+    # es with ingest-attachment plugin
+    else
+
+      # create indexes
+      puts 'create indexes...'
+      SearchIndexBackend.index(
+        action: 'create',
+        data: {
+          mappings: {
+            Ticket: {
+              _source: { excludes: [ 'article.attachment' ] },
+            }
+          }
+        }
+      )
+
+      # update processors
+      pipeline = 'zammad-attachment'
+      Setting.set('es_pipeline', pipeline)
+      SearchIndexBackend.processors(
+        "_ingest/pipeline/#{pipeline}": [
+          {
+            action: 'delete',
+          },
+          {
+            action: 'create',
+            description: 'Extract zammad-attachment information from arrays',
+            processors: [
+              {
+                foreach: {
+                  field: 'article',
+                  ignore_failure: true,
+                  processor: {
+                    foreach: {
+                      field: '_ingest._value.attachment',
+                      ignore_failure: true,
+                      processor: {
+                        attachment: {
+                          target_field: '_ingest._value',
+                          field: '_ingest._value._content',
+                          ignore_failure: true,
+                        }
+                      }
+                    }
+                  }
+                }
+              }
+            ]
+          }
+        ]
+      )
+    end

   end

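Net effect of the ingest-attachment branch: the task stores the pipeline name in the es_pipeline setting and pushes one pipeline definition to Elasticsearch; the action: 'delete' item is currently a no-op, because the DELETE branch in SearchIndexBackend.processors is commented out and simply calls next. A sketch of the resulting request, with an assumed example host:

  # PUT http://localhost:9200/_ingest/pipeline/zammad-attachment
  # body: the description plus the nested foreach/attachment processors shown above
  pipeline_url = "#{Setting.get('es_url')}/_ingest/pipeline/#{Setting.get('es_pipeline')}"
  Setting.get('es_pipeline')   # => 'zammad-attachment' after rake searchindex:create
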
@@ -1,4 +1,4 @@
 # encoding: utf-8

 require 'integration_test_helper'
-
+require 'rake'

@@ -168,7 +168,7 @@ class ElasticsearchTest < ActiveSupport::TestCase
     Store.add(
       object: 'Ticket::Article',
       o_id: article1.id,
-      data: IO.binread("#{Rails.root}/test/fixtures/es-normal.txt"),
+      data: IO.binread(Rails.root.join('test', 'fixtures', 'es-normal.txt')),
       filename: 'es-normal.txt',
       preferences: {},
       created_by_id: 1,

@@ -194,9 +194,9 @@ class ElasticsearchTest < ActiveSupport::TestCase
     assert(attributes['article'][0]['attachment'][0])
     assert_not(attributes['article'][0]['attachment'][1])
     assert_equal('es-normal.txt', attributes['article'][0]['attachment'][0]['_name'])
-    assert_equal("c29tZSBub3JtYWwgdGV4dDY2Cg==\n", attributes['article'][0]['attachment'][0]['_content'])
+    assert_equal('c29tZSBub3JtYWwgdGV4dDY2Cg==', attributes['article'][0]['attachment'][0]['_content'])

-    ticket1.destroy
+    ticket1.destroy!

     # execute background jobs
     Scheduler.worker(true)

@@ -229,7 +229,7 @@ class ElasticsearchTest < ActiveSupport::TestCase
     Store.add(
       object: 'Ticket::Article',
       o_id: article1.id,
-      data: IO.binread("#{Rails.root}/test/fixtures/es-normal.txt"),
+      data: IO.binread(Rails.root.join('test', 'fixtures', 'es-normal.txt')),
       filename: 'es-normal.txt',
       preferences: {},
       created_by_id: 1,

@@ -240,7 +240,7 @@ class ElasticsearchTest < ActiveSupport::TestCase
     Store.add(
       object: 'Ticket::Article',
       o_id: article1.id,
-      data: IO.binread("#{Rails.root}/test/fixtures/es-pdf1.pdf"),
+      data: IO.binread(Rails.root.join('test', 'fixtures', 'es-pdf1.pdf')),
       filename: 'es-pdf1.pdf',
       preferences: {},
       created_by_id: 1,

@@ -251,7 +251,7 @@ class ElasticsearchTest < ActiveSupport::TestCase
     Store.add(
       object: 'Ticket::Article',
       o_id: article1.id,
-      data: IO.binread("#{Rails.root}/test/fixtures/es-box1.box"),
+      data: IO.binread(Rails.root.join('test', 'fixtures', 'es-box1.box')),
       filename: 'mail1.box',
       preferences: {},
       created_by_id: 1,

@@ -262,7 +262,7 @@ class ElasticsearchTest < ActiveSupport::TestCase
     Store.add(
       object: 'Ticket::Article',
       o_id: article1.id,
-      data: IO.binread("#{Rails.root}/test/fixtures/es-too-big.txt"),
+      data: IO.binread(Rails.root.join('test', 'fixtures', 'es-too-big.txt')),
       filename: 'es-too-big.txt',
       preferences: {},
       created_by_id: 1,