Added support for elasticsearch 6 and 7. Fixes #1688.

Martin Edenhofer 2019-06-20 12:45:27 +02:00
parent 9949dd18dc
commit af9fcf4565
11 changed files with 310 additions and 155 deletions

View file

@ -214,16 +214,9 @@ test:integration:clearbit:
### Elasticsearch
test:integration:es:
.script_integration_es_template: &script_integration_es_definition
<<: *base_env
stage: test
services:
- name: registry.znuny.com/docker/zammad-mysql:latest
alias: mysql
- name: registry.znuny.com/docker/zammad-postgresql:latest
alias: postgresql
- name: registry.znuny.com/docker/zammad-elasticsearch:5.6
alias: elasticsearch
variables:
RAILS_ENV: "test"
ES_INDEX_RAND: "true"
@ -235,6 +228,36 @@ test:integration:es:
- bundle exec rspec --tag searchindex
- bundle exec rails test test/integration/report_test.rb
test:integration:es:5.6:
<<: *script_integration_es_definition
services:
- name: registry.znuny.com/docker/zammad-mysql:latest
alias: mysql
- name: registry.znuny.com/docker/zammad-postgresql:latest
alias: postgresql
- name: registry.znuny.com/docker/zammad-elasticsearch:5.6
alias: elasticsearch
test:integration:es:6:
<<: *script_integration_es_definition
services:
- name: registry.znuny.com/docker/zammad-mysql:latest
alias: mysql
- name: registry.znuny.com/docker/zammad-postgresql:latest
alias: postgresql
- name: registry.znuny.com/docker/zammad-elasticsearch:6
alias: elasticsearch
test:integration:es:7:
<<: *script_integration_es_definition
services:
- name: registry.znuny.com/docker/zammad-mysql:latest
alias: mysql
- name: registry.znuny.com/docker/zammad-postgresql:latest
alias: postgresql
- name: registry.znuny.com/docker/zammad-elasticsearch:7
alias: elasticsearch
### Zendesk
test:integration:zendesk:
@ -320,7 +343,7 @@ browser:build:
alias: mysql
- name: registry.znuny.com/docker/zammad-postgresql:latest
alias: postgresql
- name: registry.znuny.com/docker/zammad-elasticsearch:5.6
- name: registry.znuny.com/docker/zammad-elasticsearch:stable
alias: elasticsearch
- name: docker.io/elgalu/selenium:3.14.0-p17
alias: selenium

View file

@ -197,12 +197,16 @@ class FormController < ApplicationController
def limit_reached?
return false if !SearchIndexBackend.enabled?
# quote ipv6 ip
remote_ip = request.remote_ip.gsub(':', '\\:')
# in Elasticsearch 7 "created_at:>now-1h" is not working, so use -2h instead
form_limit_by_ip_per_hour = Setting.get('form_ticket_create_by_ip_per_hour') || 20
result = SearchIndexBackend.search("preferences.form.remote_ip:'#{request.remote_ip}' AND created_at:>now-1h", 'Ticket', limit: form_limit_by_ip_per_hour)
result = SearchIndexBackend.search("preferences.form.remote_ip:'#{remote_ip}' AND created_at:>now-2h", 'Ticket', limit: form_limit_by_ip_per_hour)
raise Exceptions::NotAuthorized if result.count >= form_limit_by_ip_per_hour.to_i
form_limit_by_ip_per_day = Setting.get('form_ticket_create_by_ip_per_day') || 240
result = SearchIndexBackend.search("preferences.form.remote_ip:'#{request.remote_ip}' AND created_at:>now-1d", 'Ticket', limit: form_limit_by_ip_per_day)
result = SearchIndexBackend.search("preferences.form.remote_ip:'#{remote_ip}' AND created_at:>now-1d", 'Ticket', limit: form_limit_by_ip_per_day)
raise Exceptions::NotAuthorized if result.count >= form_limit_by_ip_per_day.to_i
form_limit_per_day = Setting.get('form_ticket_create_per_day') || 5000
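A quick illustrative trace of the escaping and the widened time window above (the client address is assumed, not part of the commit):

# assumed IPv6 client address; colons are reserved characters in a Lucene query string
raw_ip    = '::1'
quoted_ip = raw_ip.gsub(':', '\\:')   # each ":" comes out as "\:"
# query handed to SearchIndexBackend.search, now with a 2h window because
# "created_at:>now-1h" is not working on Elasticsearch 7
"preferences.form.remote_ip:'#{quoted_ip}' AND created_at:>now-2h"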

View file

@ -39,7 +39,11 @@ returns
end
if sort_by.blank?
sort_by.push(default)
if default.is_a?(Array)
sort_by = default
else
sort_by.push(default)
end
end
sort_by
@ -77,7 +81,11 @@ returns
end
if order_by.blank?
order_by.push(default)
if default.is_a?(Array)
order_by = default
else
order_by.push(default)
end
end
order_by
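The effect of the change above: an Array default now replaces the whole sort/order list instead of being pushed as a single element. A standalone re-statement of that logic in plain Ruby (the helper name is hypothetical, not the Zammad concern itself):

# hypothetical helper mirroring the new blank-default handling
def apply_default(list, default)
  return list unless list.empty?
  default.is_a?(Array) ? default : list.push(default)
end

apply_default([], 'updated_at')           # => ["updated_at"]           (scalar default, pushed as before)
apply_default([], %w[active updated_at])  # => ["active", "updated_at"] (array default, taken over as-is)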

View file

@ -72,11 +72,11 @@ returns
offset = params[:offset] || 0
current_user = params[:current_user]
# check sort
sort_by = search_get_sort_by(params, 'updated_at')
# check sort - positions related to order by
sort_by = search_get_sort_by(params, %w[active updated_at])
# check order
order_by = search_get_order_by(params, 'desc')
# check order - positions related to sort by
order_by = search_get_order_by(params, %w[desc desc])
# enable search only for agents and admins
return [] if !search_preferences(current_user)

View file

@ -83,11 +83,11 @@ returns
offset = params[:offset] || 0
current_user = params[:current_user]
# check sort
sort_by = search_get_sort_by(params, 'updated_at')
# check sort - positions related to order by
sort_by = search_get_sort_by(params, %w[active updated_at])
# check order
order_by = search_get_order_by(params, 'desc')
# check order - positions related to sort by
order_by = search_get_order_by(params, %w[desc desc])
# enable search only for agents and admins
return [] if !search_preferences(current_user)

View file

@ -0,0 +1,18 @@
class SettingEsMultiIndex < ActiveRecord::Migration[5.1]
def up
# return if it's a new setup
return if !Setting.find_by(name: 'system_init_done')
Setting.create_if_not_exists(
title: 'Elasticsearch Multi Index',
name: 'es_multi_index',
area: 'SearchIndex::Elasticsearch',
description: 'Define if Elasticsearch is using multiple indexes.',
state: false,
preferences: { online_service_disable: true },
frontend: false
)
end
end

View file

@ -2849,6 +2849,15 @@ Setting.create_if_not_exists(
preferences: { online_service_disable: true },
frontend: false
)
Setting.create_if_not_exists(
title: 'Elasticsearch Multi Index',
name: 'es_multi_index',
area: 'SearchIndex::Elasticsearch',
description: 'Define if Elasticsearch is using multiple indexes.',
state: false,
preferences: { online_service_disable: true },
frontend: false
)
Setting.create_if_not_exists(
title: 'Import Mode',

View file

@ -45,7 +45,7 @@ returns
end
selector.merge!(without_merged_tickets) # do not show merged tickets in reports
result_es = SearchIndexBackend.selectors(['Ticket'], selector, {}, aggs_interval)
result_es = SearchIndexBackend.selectors('Ticket', selector, {}, aggs_interval)
if params[:interval] == 'month'
stop_interval = 12
elsif params[:interval] == 'week'
@ -166,7 +166,7 @@ returns
end
selector.merge!(without_merged_tickets) # do not show merged tickets in reports
result = SearchIndexBackend.selectors(['Ticket'], selector, { limit: limit }, aggs_interval)
result = SearchIndexBackend.selectors('Ticket', selector, { limit: limit }, aggs_interval)
return result if params[:sheet].present?
assets = {}

View file

@ -31,8 +31,11 @@ info about used search index machine
installed_version = response.data.dig('version', 'number')
raise "Unable to get elasticsearch version from response: #{response.inspect}" if installed_version.blank?
version_supported = Gem::Version.new(installed_version) < Gem::Version.new('5.7')
raise "Version #{installed_version} of configured elasticsearch is not supported" if !version_supported
version_supported = Gem::Version.new(installed_version) < Gem::Version.new('8')
raise "Version #{installed_version} of configured elasticsearch is not supported." if !version_supported
version_supported = Gem::Version.new(installed_version) > Gem::Version.new('2.3')
raise "Version #{installed_version} of configured elasticsearch is not supported." if !version_supported
return response.data
end
@ -130,6 +133,7 @@ create/update/delete index
SearchIndexBackend.index(
:action => 'create', # create/update/delete
:name => 'Ticket',
:data => {
:mappings => {
:Ticket => {
@ -148,17 +152,14 @@ create/update/delete index
SearchIndexBackend.index(
:action => 'delete', # create/update/delete
:name => 'Ticket', # optional
:name => 'Ticket',
)
SearchIndexBackend.index(
:action => 'delete', # create/update/delete
)
=end
def self.index(data)
url = build_url(data[:name])
url = build_url(data[:name], nil, false, false)
return if url.blank?
if data[:action] && data[:action] == 'delete'
@ -245,7 +246,7 @@ remove whole data from index
=end
def self.remove(type, o_id = nil)
url = build_url(type, o_id)
url = build_url(type, o_id, false, false)
return if url.blank?
Rails.logger.info "# curl -X DELETE \"#{url}\""
@ -275,7 +276,7 @@ remove whole data from index
=begin
@param query [String] search query
@param index [String, Array<String>, nil] indexes to search in (see search_by_index)
@param index [String, Array<String>] indexes to search in (see search_by_index)
@param options [Hash] search options (see build_query)
@return search result
@ -305,7 +306,7 @@ remove whole data from index
=end
def self.search(query, index = nil, options = {})
def self.search(query, index, options = {})
if !index.is_a? Array
return search_by_index(query, index, options)
end
@ -318,29 +319,21 @@ remove whole data from index
=begin
@param query [String] search query
@param index [String, Array<String>, nil] index name or list of index names. If index is nil or not present, the search will be performed globally
@param query [String] search query
@param index [String] index name
@param options [Hash] search options (see build_query)
@return search result
=end
def self.search_by_index(query, index = nil, options = {})
def self.search_by_index(query, index, options = {})
return [] if query.blank?
url = build_url
return if url.blank?
url += if index
if index.is_a?(Array)
"/#{index.join(',')}/_search"
else
"/#{index}/_search"
end
else
'/_search'
end
url += build_search_url(index)
# real search condition
condition = {
@ -396,8 +389,8 @@ remove whole data from index
Rails.logger.info "... #{item['_type']} #{item['_id']}"
output = {
id: item['_id'].to_i,
type: item['_type'],
id: item['_id'],
type: index,
}
if options.dig(:highlight_fields_by_indexes, index.to_sym)
@ -434,15 +427,6 @@ remove whole data from index
)
end
# add sorting by active if active is not part of the query
if result.flat_map(&:keys).exclude?(:active)
result.unshift(
active: {
order: 'desc',
},
)
end
result.push('_score')
result
@ -456,7 +440,7 @@ get count of tickets and tickets which match on selector
example with a simple search:
result = SearchIndexBackend.selectors('Ticket', { category: { operator: 'is', value: 'aa::ab' } })
result = SearchIndexBackend.selectors('Ticket', { 'category' => { 'operator' => 'is', 'value' => 'aa::ab' } })
result = [
{ id: 1, type: 'Ticket' },
@ -482,7 +466,7 @@ example for aggregations within one year
current_user: User.find(123),
}
result = SearchIndexBackend.selectors('Ticket', { category: { operator: 'is', value: 'aa::ab' } }, options, aggs_interval)
result = SearchIndexBackend.selectors('Ticket', { 'category' => { 'operator' => 'is', 'value' => 'aa::ab' } }, options, aggs_interval)
result = {
hits:{
@ -509,21 +493,13 @@ example for aggregations within one year
=end
def self.selectors(index = nil, selectors = nil, options = {}, aggs_interval = nil)
def self.selectors(index, selectors = nil, options = {}, aggs_interval = nil)
raise 'no selectors given' if !selectors
url = build_url
url = build_url(nil, nil, false, false)
return if url.blank?
url += if index
if index.is_a?(Array)
"/#{index.join(',')}/_search"
else
"/#{index}/_search"
end
else
'/_search'
end
url += build_search_url(index)
data = selector2query(selectors, options, aggs_interval)
@ -669,9 +645,9 @@ example for aggregations within one year
t[:range] = {}
t[:range][key_tmp] = {}
if data['operator'] == 'before (absolute)'
t[:range][key_tmp][:lt] = (data['value']).to_s
t[:range][key_tmp][:lt] = (data['value'])
else
t[:range][key_tmp][:gt] = (data['value']).to_s
t[:range][key_tmp][:gt] = (data['value'])
end
query_must.push t
else
@ -745,25 +721,71 @@ return true if backend is configured
true
end
def self.build_url(type = nil, o_id = nil)
def self.build_index_name(index)
local_index = "#{Setting.get('es_index')}_#{Rails.env}"
"#{local_index}_#{index.underscore.tr('/', '_')}"
end
def self.build_url(type = nil, o_id = nil, pipeline = true, with_type = true)
return if !SearchIndexBackend.enabled?
# for elasticsearch 5.6 and lower
index = "#{Setting.get('es_index')}_#{Rails.env}"
url = Setting.get('es_url')
url = if type
url_pipline = Setting.get('es_pipeline')
if url_pipline.present?
url_pipline = "?pipeline=#{url_pipline}"
end
if o_id
"#{url}/#{index}/#{type}/#{o_id}#{url_pipline}"
if Setting.get('es_multi_index') == false
url = Setting.get('es_url')
url = if type
url_pipline = Setting.get('es_pipeline')
if url_pipline.present?
url_pipline = "?pipeline=#{url_pipline}"
end
if o_id
"#{url}/#{index}/#{type}/#{o_id}#{url_pipline}"
else
"#{url}/#{index}/#{type}#{url_pipline}"
end
else
"#{url}/#{index}/#{type}#{url_pipline}"
"#{url}/#{index}"
end
else
"#{url}/#{index}"
end
url
return url
end
# for elasticsearch 6.x and higher
url = Setting.get('es_url')
if pipeline == true
url_pipline = Setting.get('es_pipeline')
if url_pipline.present?
url_pipline = "?pipeline=#{url_pipline}"
end
end
if type
index = build_index_name(type)
if with_type == false
return "#{url}/#{index}"
end
if o_id
return "#{url}/#{index}/_doc/#{o_id}#{url_pipline}"
end
return "#{url}/#{index}/_doc#{url_pipline}"
end
"#{url}/"
end
def self.build_search_url(index)
# for elasticsearch 5.6 and lower
if Setting.get('es_multi_index') == false
if index
return "/#{index}/_search"
end
return '/_search'
end
# for elasticsearch 6.x and higher
"#{build_index_name(index)}/_doc/_search"
end
def self.humanized_error(verb:, url:, payload: nil, response:)
@ -833,4 +855,5 @@ return true if backend is configured
data
end
end
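To make the two URL layouts above easier to compare, here is a rough sketch of the endpoints that build_url / build_search_url produce, assuming es_url is http://localhost:9200, es_index is 'zammad', Rails.env is 'test' and no ingest pipeline is configured (all of these values are illustrative):

# Elasticsearch 5.6 and lower (es_multi_index == false), one shared index with a type per model:
#   SearchIndexBackend.build_url('Ticket', 123)
#     #=> "http://localhost:9200/zammad_test/Ticket/123"
#   search URL for tickets:
#     #=> "http://localhost:9200/zammad_test/Ticket/_search"
#
# Elasticsearch 6.x / 7.x (es_multi_index == true), one index per model with the fixed _doc type:
#   SearchIndexBackend.build_index_name('Ticket')   #=> "zammad_test_ticket"
#   SearchIndexBackend.build_url('Ticket', 123)
#     #=> "http://localhost:9200/zammad_test_ticket/_doc/123"
#   search URL for tickets:
#     #=> "http://localhost:9200/zammad_test_ticket/_doc/_search"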

View file

@ -3,12 +3,21 @@ require 'rubygems'
namespace :searchindex do
task :drop, [:opts] => :environment do |_t, _args|
print 'drop indexes...'
# drop indexes
print 'drop indexes...'
SearchIndexBackend.index(
action: 'delete',
)
if es_multi_index?
Models.indexable.each do |local_object|
SearchIndexBackend.index(
action: 'delete',
name: local_object.name,
)
end
else
SearchIndexBackend.index(
action: 'delete',
)
end
puts 'done'
Rake::Task['searchindex:drop_pipeline'].execute
@ -17,32 +26,40 @@ namespace :searchindex do
task :create, [:opts] => :environment do |_t, _args|
print 'create indexes...'
# es with mapper-attachments plugin
info = SearchIndexBackend.info
number = nil
if info.present?
number = info['version']['number'].to_s
if es_multi_index?
Setting.set('es_multi_index', true)
else
Setting.set('es_multi_index', false)
end
settings = {
'index.mapping.total_fields.limit': 2000,
}
mapping = {}
Models.indexable.each do |local_object|
mapping.merge!(get_mapping_properties_object(local_object))
end
# create indexes
SearchIndexBackend.index(
action: 'create',
data: {
mappings: mapping,
settings: settings,
}
)
if number.blank? || number =~ /^[2-4]\./ || number =~ /^5\.[0-5]\./
Setting.set('es_pipeline', '')
if es_multi_index?
Models.indexable.each do |local_object|
SearchIndexBackend.index(
action: 'create',
name: local_object.name,
data: {
mappings: get_mapping_properties_object(local_object),
settings: settings,
}
)
end
else
mapping = {}
Models.indexable.each do |local_object|
mapping.merge!(get_mapping_properties_object(local_object))
end
SearchIndexBackend.index(
action: 'create',
data: {
mappings: mapping,
settings: settings,
}
)
end
puts 'done'
@ -51,14 +68,10 @@ namespace :searchindex do
end
task :create_pipeline, [:opts] => :environment do |_t, _args|
# es with mapper-attachments plugin
info = SearchIndexBackend.info
number = nil
if info.present?
number = info['version']['number'].to_s
if !es_pipeline?
Setting.set('es_pipeline', '')
next
end
next if number.blank? || number =~ /^[2-4]\./ || number =~ /^5\.[0-5]\./
# update processors
pipeline = Setting.get('es_pipeline')
@ -103,14 +116,7 @@ namespace :searchindex do
end
task :drop_pipeline, [:opts] => :environment do |_t, _args|
# es with mapper-attachments plugin
info = SearchIndexBackend.info
number = nil
if info.present?
number = info['version']['number'].to_s
end
next if number.blank? || number =~ /^[2-4]\./ || number =~ /^5\.[0-5]\./
next if !es_pipeline?
# update processors
pipeline = Setting.get('es_pipeline')
@ -142,11 +148,9 @@ namespace :searchindex do
end
task :rebuild, [:opts] => :environment do |_t, _args|
Rake::Task['searchindex:drop'].execute
Rake::Task['searchindex:create'].execute
Rake::Task['searchindex:reload'].execute
end
end
@ -172,74 +176,87 @@ mapping = {
=end
def get_mapping_properties_object(object)
name = object.name
if es_multi_index?
name = '_doc'
end
result = {
object.name => {
name => {
properties: {}
}
}
store_columns = %w[preferences data]
# for elasticsearch 6.x and later
string_type = 'text'
string_raw = { 'type': 'keyword' }
boolean_raw = { 'type': 'boolean' }
# for elasticsearch 5.6 and lower
if !es_multi_index?
string_type = 'string'
string_raw = { 'type': 'string', 'index': 'not_analyzed' }
boolean_raw = { 'type': 'boolean', 'index': 'not_analyzed' }
end
object.columns_hash.each do |key, value|
if value.type == :string && value.limit && value.limit <= 5000 && store_columns.exclude?(key)
result[object.name][:properties][key] = {
type: 'string',
result[name][:properties][key] = {
type: string_type,
fields: {
raw: { 'type': 'string', 'index': 'not_analyzed' }
raw: string_raw,
}
}
elsif value.type == :integer
result[object.name][:properties][key] = {
result[name][:properties][key] = {
type: 'integer',
}
elsif value.type == :datetime
result[object.name][:properties][key] = {
result[name][:properties][key] = {
type: 'date',
}
elsif value.type == :boolean
result[object.name][:properties][key] = {
result[name][:properties][key] = {
type: 'boolean',
fields: {
raw: { 'type': 'boolean', 'index': 'not_analyzed' }
raw: boolean_raw,
}
}
elsif value.type == :binary
result[object.name][:properties][key] = {
result[name][:properties][key] = {
type: 'binary',
}
elsif value.type == :bigint
result[object.name][:properties][key] = {
result[name][:properties][key] = {
type: 'long',
}
elsif value.type == :decimal
result[object.name][:properties][key] = {
result[name][:properties][key] = {
type: 'float',
}
elsif value.type == :date
result[object.name][:properties][key] = {
result[name][:properties][key] = {
type: 'date',
}
end
end
# es with mapper-attachments plugin
info = SearchIndexBackend.info
number = nil
if info.present?
number = info['version']['number'].to_s
end
if object.name == 'Ticket'
result[object.name][:_source] = {
# do not serve attachments if document is requested
result[name][:_source] = {
excludes: ['article.attachment']
}
if number.blank? || number =~ /^[2-4]\./ || number =~ /^5\.[0-5]\./
result[object.name][:_source] = {
# for elasticsearch 5.5 and lower
if !es_pipeline?
result[name][:_source] = {
excludes: ['article.attachment']
}
result[object.name][:properties][:article] = {
result[name][:properties][:article] = {
type: 'nested',
include_in_parent: true,
properties: {
@ -251,5 +268,45 @@ def get_mapping_properties_object(object)
end
end
result
return result if es_type_in_mapping?
result[name]
end
# get es version
def es_version
info = SearchIndexBackend.info
number = nil
if info.present?
number = info['version']['number'].to_s
end
number
end
# no es_pipeline for elasticsearch 5.5 and lower
def es_pipeline?
number = es_version
return false if number.blank?
return false if number =~ /^[2-4]\./
return false if number =~ /^5\.[0-5]\./
true
end
# no multi index for elasticsearch 5.6 and lower
def es_multi_index?
number = es_version
return false if number.blank?
return false if number =~ /^[2-5]\./
true
end
# elasticsearch 7 and higher no longer accepts a type in the mapping
def es_type_in_mapping?
number = es_version
return true if number.blank?
return true if number =~ /^[2-6]\./
false
end
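For orientation, what the three helpers above report for a few server versions (version strings assumed; the results follow directly from the regular expressions in the helpers):

# version    es_pipeline?  es_multi_index?  es_type_in_mapping?
# '5.5.0'    false         false            true
# '5.6.9'    true          false            true
# '6.8.0'    true          true             true
# '7.1.0'    true          true             false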

View file

@ -151,18 +151,14 @@ RSpec.describe 'Form', type: :request, searchindex: true do
token = json_response['token']
(1..20).each do |count|
travel 10.seconds
post '/api/v1/form_submit', params: { fingerprint: fingerprint, token: token, name: 'Bob Smith', email: 'discard@znuny.com', title: "test#{count}", body: 'hello' }, as: :json
expect(response).to have_http_status(:ok)
expect(json_response).to be_a_kind_of(Hash)
expect(json_response['errors']).to be_falsey
expect(json_response['errors']).to be_falsey
expect(json_response['ticket']).to be_truthy
expect(json_response['ticket']['id']).to be_truthy
expect(json_response['ticket']['number']).to be_truthy
Scheduler.worker(true)
sleep 1 # wait until elasticsearch has indexed
end
sleep 10 # wait until elasticsearch has indexed
@ -175,7 +171,6 @@ RSpec.describe 'Form', type: :request, searchindex: true do
@headers = { 'ACCEPT' => 'application/json', 'CONTENT_TYPE' => 'application/json', 'REMOTE_ADDR' => '1.2.3.5' }
(1..20).each do |count|
travel 10.seconds
post '/api/v1/form_submit', params: { fingerprint: fingerprint, token: token, name: 'Bob Smith', email: 'discard@znuny.com', title: "test-2-#{count}", body: 'hello' }, as: :json
expect(response).to have_http_status(:ok)
expect(json_response).to be_a_kind_of(Hash)
@ -183,9 +178,27 @@ RSpec.describe 'Form', type: :request, searchindex: true do
expect(json_response['errors']).to be_falsey
expect(json_response['ticket']).to be_truthy
expect(json_response['ticket']['id']).to be_truthy
expect(json_response['ticket']['number']).to be_truthy
Scheduler.worker(true)
sleep 1 # wait until elasticsearch has indexed
end
sleep 10 # wait until elasticsearch has indexed
post '/api/v1/form_submit', params: { fingerprint: fingerprint, token: token, name: 'Bob Smith', email: 'discard@znuny.com', title: 'test-2-last', body: 'hello' }, as: :json
expect(response).to have_http_status(:unauthorized)
expect(json_response).to be_a_kind_of(Hash)
expect(json_response['error']).to be_truthy
@headers = { 'ACCEPT' => 'application/json', 'CONTENT_TYPE' => 'application/json', 'REMOTE_ADDR' => '::1' }
(1..20).each do |count|
post '/api/v1/form_submit', params: { fingerprint: fingerprint, token: token, name: 'Bob Smith', email: 'discard@znuny.com', title: "test-2-#{count}", body: 'hello' }, as: :json
expect(response).to have_http_status(:ok)
expect(json_response).to be_a_kind_of(Hash)
expect(json_response['errors']).to be_falsey
expect(json_response['ticket']).to be_truthy
expect(json_response['ticket']['id']).to be_truthy
Scheduler.worker(true)
end
sleep 10 # wait until elasticsearch has indexed