# Copyright (C) 2012-2021 Zammad Foundation, http://zammad-foundation.org/

class SearchIndexBackend

=begin

info about the used search index server

  SearchIndexBackend.info

=end

  def self.info
    url = Setting.get('es_url').to_s
    return if url.blank?

    response = make_request(url)

    if response.success?
      installed_version = response.data.dig('version', 'number')
      raise "Unable to get elasticsearch version from response: #{response.inspect}" if installed_version.blank?

      installed_version_parsed = Gem::Version.new(installed_version)

      version_supported = installed_version_parsed < Gem::Version.new('8')
      raise "Version #{installed_version} of configured elasticsearch is not supported." if !version_supported

      version_supported = installed_version_parsed >= Gem::Version.new('7.8')
      raise "Version #{installed_version} of configured elasticsearch is not supported." if !version_supported

      return response.data
    end

    raise humanized_error(
      verb: 'GET',
      url: url,
      response: response,
    )
  end
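
  # Illustrative sketch (not part of the original source): on success this
  # returns the parsed Elasticsearch root endpoint payload, e.g.:
  #
  #   SearchIndexBackend.info
  #   # => { 'name' => 'node-1', 'version' => { 'number' => '7.10.2', ... }, ... }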

=begin

update processors

  SearchIndexBackend.processors(
    _ingest/pipeline/attachment: {
      description: 'Extract attachment information from arrays',
      processors: [
        {
          foreach: {
            field: 'ticket.articles.attachments',
            processor: {
              attachment: {
                target_field: '_ingest._value.attachment',
                field: '_ingest._value.data'
              }
            }
          }
        }
      ]
    }
  )

=end

  def self.processors(data)
    data.each do |key, items|
      url = "#{Setting.get('es_url')}/#{key}"

      items.each do |item|
        if item[:action] == 'delete'
          response = make_request(url, method: :delete)

          next if response.success?
          next if response.code.to_s == '404'

          raise humanized_error(
            verb: 'DELETE',
            url: url,
            response: response,
          )
        end

        item.delete(:action)

        make_request_and_validate(url, data: item, method: :put)
      end
    end
    true
  end

=begin

create/update/delete index

  SearchIndexBackend.index(
    :action => 'create', # create/update/delete
    :name   => 'Ticket',
    :data   => {
      :mappings => {
        :Ticket => {
          :properties => {
            :articles => {
              :type       => 'nested',
              :properties => {
                'attachment' => { :type => 'attachment' }
              }
            }
          }
        }
      }
    }
  )

  SearchIndexBackend.index(
    :action => 'delete', # create/update/delete
    :name   => 'Ticket',
  )

=end

  def self.index(data)

    url = build_url(type: data[:name], with_pipeline: false, with_document_type: false)
    return if url.blank?

    if data[:action] && data[:action] == 'delete'
      return SearchIndexBackend.remove(data[:name])
    end

    make_request_and_validate(url, data: data[:data], method: :put)
  end

=begin

add a new object to the search index

  SearchIndexBackend.add('Ticket', some_data_object)

=end

  def self.add(type, data)

    url = build_url(type: type, object_id: data['id'])
    return if url.blank?

    make_request_and_validate(url, data: data, method: :post)
  end

=begin

This function updates specific attributes of an index based on a query.

  data = {
    organization: {
      name: "Zammad Foundation"
    }
  }
  where = {
    organization_id: 1
  }
  SearchIndexBackend.update_by_query('Ticket', data, where)

=end

  def self.update_by_query(type, data, where)
    return if data.blank?
    return if where.blank?

    url = build_url(type: type, action: '_update_by_query', with_pipeline: false, with_document_type: false, url_params: { conflicts: 'proceed' })
    return if url.blank?

    script_list = []
    data.each do |key, _value|
      script_list.push("ctx._source.#{key}=params.#{key}")
    end

    data = {
      script: {
        lang: 'painless',
        source: script_list.join(';'),
        params: data,
      },
      query: {
        term: where,
      },
    }

    make_request_and_validate(url, data: data, method: :post, read_timeout: 10.minutes)
  end
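
  # Illustrative sketch (not part of the original source): for the documented
  # example above, the generated request body looks roughly like this:
  #
  #   {
  #     script: {
  #       lang: 'painless',
  #       source: 'ctx._source.organization=params.organization',
  #       params: { organization: { name: 'Zammad Foundation' } },
  #     },
  #     query: { term: { organization_id: 1 } },
  #   }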

=begin

remove whole data from index

  SearchIndexBackend.remove('Ticket', 123)

  SearchIndexBackend.remove('Ticket')

=end

  def self.remove(type, o_id = nil)
    url = if o_id
            build_url(type: type, object_id: o_id, with_pipeline: false, with_document_type: true)
          else
            build_url(type: type, object_id: o_id, with_pipeline: false, with_document_type: false)
          end

    return if url.blank?

    response = make_request(url, method: :delete)

    return true if response.success?
    return true if response.code.to_s == '400'

    humanized_error = humanized_error(
      verb: 'DELETE',
      url: url,
      response: response,
    )
    Rails.logger.warn "Can't delete index: #{humanized_error}"
    false
  end

=begin

@param query [String] search query
@param index [String, Array<String>] indexes to search in (see search_by_index)
@param options [Hash] search options (see build_query)

@return search result

@example Sample queries

  result = SearchIndexBackend.search('search query', ['User', 'Organization'], limit: limit)

  result = SearchIndexBackend.search('search query', 'User', limit: limit)

  result = SearchIndexBackend.search('search query', 'User', limit: limit, sort_by: ['updated_at'], order_by: ['desc'])

  result = SearchIndexBackend.search('search query', 'User', limit: limit, sort_by: ['active', 'updated_at'], order_by: ['desc', 'desc'])

  result = [
    {
      :id   => 123,
      :type => 'User',
    },
    {
      :id   => 125,
      :type => 'User',
    },
    {
      :id   => 15,
      :type => 'Organization',
    }
  ]

=end

  def self.search(query, index, options = {})
    if !index.is_a? Array
      return search_by_index(query, index, options)
    end

    index
      .filter_map { |local_index| search_by_index(query, local_index, options) }
      .flatten(1)
  end

=begin

@param query [String] search query
@param index [String] index name
@param options [Hash] search options (see build_query)

@return search result

=end

  def self.search_by_index(query, index, options = {})
    return [] if query.blank?

    url = build_url(type: index, action: '_search', with_pipeline: false, with_document_type: true)
    return [] if url.blank?

    # real search condition
    condition = {
      'query_string' => {
        'query' => append_wildcard_to_simple_query(query),
        'time_zone' => Setting.get('timezone_default').presence || 'UTC',
        'default_operator' => 'AND',
        'analyze_wildcard' => true,
      }
    }

    if (fields = options.dig(:highlight_fields_by_indexes, index.to_sym))
      condition['query_string']['fields'] = fields
    end

    query_data = build_query(condition, options)

    if (fields = options.dig(:highlight_fields_by_indexes, index.to_sym))
      fields_for_highlight = fields.index_with { |_elem| {} }

      query_data[:highlight] = { fields: fields_for_highlight }
    end

    response = make_request(url, data: query_data)

    if !response.success?
      Rails.logger.error humanized_error(
        verb: 'GET',
        url: url,
        payload: query_data,
        response: response,
      )
      return []
    end
    data = response.data&.dig('hits', 'hits')

    return [] if !data

    data.map do |item|
      Rails.logger.debug { "... #{item['_type']} #{item['_id']}" }

      output = {
        id: item['_id'],
        type: index,
      }

      if options.dig(:highlight_fields_by_indexes, index.to_sym)
        output[:highlight] = item['highlight']
      end

      output
    end
  end

  def self.search_by_index_sort(sort_by = nil, order_by = nil)
    result = (sort_by || [])
             .map(&:to_s)
             .each_with_object([])
             .each_with_index do |(elem, memo), index|
               next if elem.blank?
               next if order_by&.at(index).blank?

               # for sorting values use .keyword values (no analyzer is used - plain values)
               if elem !~ %r{\.} && elem !~ %r{_(time|date|till|id|ids|at)$} && elem != 'id'
                 elem += '.keyword'
               end

               memo.push(
                 elem => {
                   order: order_by[index],
                 },
               )
             end

    if result.blank?
      result.push(
        updated_at: {
          order: 'desc',
        },
      )
    end

    result.push('_score')

    result
  end
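
  # Illustrative sketch (not part of the original source): because a bare
  # `each_with_object([])` enumerator returns its memo array once fully
  # consumed, `result` ends up holding the pushed sort entries, e.g.:
  #
  #   search_by_index_sort(['active', 'updated_at'], ['desc', 'desc'])
  #   # => [{ 'active.keyword' => { order: 'desc' } },
  #   #     { 'updated_at' => { order: 'desc' } },
  #   #     '_score']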

=begin

get the count of tickets and the tickets which match a selector

  result = SearchIndexBackend.selectors(index, selector)

example with a simple search:

  result = SearchIndexBackend.selectors('Ticket', { 'category' => { 'operator' => 'is', 'value' => 'aa::ab' } })

  result = [
    { id: 1, type: 'Ticket' },
    { id: 2, type: 'Ticket' },
    { id: 3, type: 'Ticket' },
  ]

you can also get aggregations

  result = SearchIndexBackend.selectors(index, selector, options, aggs_interval)

example for aggregations within one year

  aggs_interval = {
    from: '2015-01-01',
    to: '2015-12-31',
    interval: 'month', # year, quarter, month, week, day, hour, minute, second
    field: 'created_at',
  }

  options = {
    limit: 123,
    current_user: User.find(123),
  }

  result = SearchIndexBackend.selectors('Ticket', { 'category' => { 'operator' => 'is', 'value' => 'aa::ab' } }, options, aggs_interval)

  result = {
    hits: {
      total: 4819,
    },
    aggregations: {
      time_buckets: {
        buckets: [
          {
            key_as_string: "2014-10-01T00:00:00.000Z",
            key: 1412121600000,
            doc_count: 420
          },
          {
            key_as_string: "2014-11-01T00:00:00.000Z",
            key: 1414800000000,
            doc_count: 561
          },
          ...
        ]
      }
    }
  }

=end

  def self.selectors(index, selectors = nil, options = {}, aggs_interval = nil)
    raise 'no selectors given' if !selectors

    url = build_url(type: index, action: '_search', with_pipeline: false, with_document_type: true)
    return if url.blank?

    data = selector2query(selectors, options, aggs_interval)

    verify_date_range(url, data)

    response = make_request(url, data: data)

    if !response.success?
      raise humanized_error(
        verb: 'GET',
        url: url,
        payload: data,
        response: response,
      )
    end
    Rails.logger.debug { response.data.to_json }

    if aggs_interval.blank? || aggs_interval[:interval].blank?
      ticket_ids = []
      response.data['hits']['hits'].each do |item|
        ticket_ids.push item['_id']
      end

      # in lower ES 6 versions, we get the total count directly; in higher
      # versions we need to pick it from the total hash
      count = response.data['hits']['total']
      if response.data['hits']['total'].class != Integer
        count = response.data['hits']['total']['value']
      end
      return {
        count: count,
        ticket_ids: ticket_ids,
      }
    end
    response.data
  end

  DEFAULT_SELECTOR_OPTIONS = {
    limit: 10
  }.freeze

  def self.selector2query(selector, options, aggs_interval)
    options = DEFAULT_SELECTOR_OPTIONS.merge(options.deep_symbolize_keys)

    current_user = options[:current_user]
    current_user_id = UserInfo.current_user_id
    if current_user
      current_user_id = current_user.id
    end

    query_must = []
    query_must_not = []
    relative_map = {
      day: 'd',
      year: 'y',
      month: 'M',
      hour: 'h',
      minute: 'm',
    }
    if selector.present?
      operators_is_isnot = ['is', 'is not']

      selector.each do |key, data|

        data = data.clone
        table, key_tmp = key.split('.')
        if key_tmp.blank?
          key_tmp = table
          table = 'ticket'
        end

        wildcard_or_term = 'term'
        if data['value'].is_a?(Array)
          wildcard_or_term = 'terms'
        end
        t = {}

        # use .keyword when comparing exact values
        if data['operator'] == 'is' || data['operator'] == 'is not'

          case data['pre_condition']
          when 'not_set'
            data['value'] = if key_tmp.match?(%r{^(created_by|updated_by|owner|customer|user)_id})
                              1
                            end
          when 'current_user.id'
            raise "Use current_user.id in selector, but no current_user is set #{data.inspect}" if !current_user_id

            data['value'] = []
            wildcard_or_term = 'terms'

            if key_tmp == 'out_of_office_replacement_id'
              data['value'].push User.find(current_user_id).out_of_office_agent_of.pluck(:id)
            else
              data['value'].push current_user_id
            end
          when 'current_user.organization_id'
            raise "Use current_user.organization_id in selector, but no current_user is set #{data.inspect}" if !current_user_id

            user = User.find_by(id: current_user_id)
            data['value'] = user.organization_id
          end

          if data['value'].is_a?(Array)
            data['value'].each do |value|
              next if !value.is_a?(String) || value !~ %r{[A-z]}

              key_tmp += '.keyword'
              break
            end
          elsif data['value'].is_a?(String) && %r{[A-z]}.match?(data['value'])
            key_tmp += '.keyword'
          end
        end

        # use .keyword and wildcard search in cases where the query contains non A-z chars
        if data['operator'] == 'contains' || data['operator'] == 'contains not'

          if data['value'].is_a?(Array)
            data['value'].each_with_index do |value, index|
              next if !value.is_a?(String) || value !~ %r{[A-z]}

              data['value'][index] = "*#{value}*"
              key_tmp += '.keyword'
              wildcard_or_term = 'wildcards'
              break
            end
          elsif data['value'].is_a?(String) && %r{[A-z]}.match?(data['value'])
            data['value'] = "*#{data['value']}*"
            key_tmp += '.keyword'
            wildcard_or_term = 'wildcard'
          end
        end

        # for pre_condition not_set we want to check whether values are defined for the object, via exists
        if data['pre_condition'] == 'not_set' && operators_is_isnot.include?(data['operator']) && data['value'].nil?
          t['exists'] = {
            field: key_tmp,
          }

          case data['operator']
          when 'is'
            query_must_not.push t
          when 'is not'
            query_must.push t
          end
          next

        end

        if table != 'ticket'
          key_tmp = "#{table}.#{key_tmp}"
        end

        # is/is not/contains/contains not
        case data['operator']
        when 'is', 'is not', 'contains', 'contains not'
          t[wildcard_or_term] = {}
          t[wildcard_or_term][key_tmp] = data['value']
          case data['operator']
          when 'is', 'contains'
            query_must.push t
          when 'is not', 'contains not'
            query_must_not.push t
          end
        when 'contains all', 'contains one', 'contains all not', 'contains one not'
          values = data['value'].split(',').map(&:strip)
          t[:query_string] = {}
          case data['operator']
          when 'contains all'
            t[:query_string][:query] = "#{key_tmp}:\"#{values.join('" AND "')}\""
            query_must.push t
          when 'contains one not'
            t[:query_string][:query] = "#{key_tmp}:\"#{values.join('" OR "')}\""
            query_must_not.push t
          when 'contains one'
            t[:query_string][:query] = "#{key_tmp}:\"#{values.join('" OR "')}\""
            query_must.push t
          when 'contains all not'
            t[:query_string][:query] = "#{key_tmp}:\"#{values.join('" AND "')}\""
            query_must_not.push t
          end

        # within last/within next (relative)
        when 'within last (relative)', 'within next (relative)'
          range = relative_map[data['range'].to_sym]
          if range.blank?
            raise "Invalid relative_map for range '#{data['range']}'."
          end

          t[:range] = {}
          t[:range][key_tmp] = {}
          if data['operator'] == 'within last (relative)'
            t[:range][key_tmp][:gte] = "now-#{data['value']}#{range}"
          else
            t[:range][key_tmp][:lt] = "now+#{data['value']}#{range}"
          end
          query_must.push t

        # before/after (relative)
        when 'before (relative)', 'after (relative)'
          range = relative_map[data['range'].to_sym]
          if range.blank?
            raise "Invalid relative_map for range '#{data['range']}'."
          end

          t[:range] = {}
          t[:range][key_tmp] = {}
          if data['operator'] == 'before (relative)'
            t[:range][key_tmp][:lt] = "now-#{data['value']}#{range}"
          else
            t[:range][key_tmp][:gt] = "now+#{data['value']}#{range}"
          end
          query_must.push t

        # till/from (relative)
        when 'till (relative)', 'from (relative)'
          range = relative_map[data['range'].to_sym]
          if range.blank?
            raise "Invalid relative_map for range '#{data['range']}'."
          end

          t[:range] = {}
          t[:range][key_tmp] = {}
          if data['operator'] == 'till (relative)'
            t[:range][key_tmp][:lt] = "now+#{data['value']}#{range}"
          else
            t[:range][key_tmp][:gt] = "now-#{data['value']}#{range}"
          end
          query_must.push t

        # before/after (absolute)
        when 'before (absolute)', 'after (absolute)'
          t[:range] = {}
          t[:range][key_tmp] = {}
          if data['operator'] == 'before (absolute)'
            t[:range][key_tmp][:lt] = (data['value'])
          else
            t[:range][key_tmp][:gt] = (data['value'])
          end
          query_must.push t
        else
          raise "unknown operator '#{data['operator']}' for #{key}"
        end
      end
    end
    data = {
      query: {},
      size: options[:limit],
    }
    # add aggs to filter
    if aggs_interval.present?
      if aggs_interval[:interval].present?
        data[:size] = 0
        data[:aggs] = {
          time_buckets: {
            date_histogram: {
              field: aggs_interval[:field],
              interval: aggs_interval[:interval],
            }
          }
        }
        if aggs_interval[:timezone].present?
          data[:aggs][:time_buckets][:date_histogram][:time_zone] = aggs_interval[:timezone]
        end
      end
      r = {}
      r[:range] = {}
      r[:range][aggs_interval[:field]] = {
        from: aggs_interval[:from],
        to: aggs_interval[:to],
      }
      query_must.push r
    end

    data[:query][:bool] ||= {}

    if query_must.present?
      data[:query][:bool][:must] = query_must
    end
    if query_must_not.present?
      data[:query][:bool][:must_not] = query_must_not
    end

    # add sort
    if aggs_interval.present? && aggs_interval[:field].present? && aggs_interval[:interval].blank?
      sort = []
      sort[0] = {}
      sort[0][aggs_interval[:field]] = {
        order: 'desc'
      }
      sort[1] = '_score'
      data['sort'] = sort
    else
      data['sort'] = search_by_index_sort(options[:sort_by], options[:order_by])
    end

    data
  end
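
  # Illustrative sketch (not part of the original source): for the simple
  # selector documented above, selector2query produces roughly this payload:
  #
  #   selector2query({ 'category' => { 'operator' => 'is', 'value' => 'aa::ab' } }, {}, nil)
  #   # => {
  #   #      query: { bool: { must: [{ 'term' => { 'category.keyword' => 'aa::ab' } }] } },
  #   #      size: 10,
  #   #      'sort' => [{ updated_at: { order: 'desc' } }, '_score'],
  #   #    }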

=begin

return true if backend is configured

  result = SearchIndexBackend.enabled?

=end

  def self.enabled?
    return false if Setting.get('es_url').blank?

    true
  end

  def self.build_index_name(index = nil)
    local_index = "#{Setting.get('es_index')}_#{Rails.env}"
    return local_index if index.blank?

    "#{local_index}_#{index.underscore.tr('/', '_')}"
  end
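
  # Illustrative sketch (not part of the original source): assuming
  # Setting.get('es_index') returns 'zammad' and Rails runs in production:
  #
  #   build_index_name               # => "zammad_production"
  #   build_index_name('Ticket')     # => "zammad_production_ticket"
  #   build_index_name('KnowledgeBase::Answer::Translation')
  #   # => "zammad_production_knowledge_base_answer_translation"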

=begin

generate url for index or document access (only for internal use)

  # url to access a single document in the index (with or without pipeline)
  url = SearchIndexBackend.build_url(type: 'User', object_id: 123, with_pipeline: true)

  # url to access the whole index
  url = SearchIndexBackend.build_url(type: 'User')

  # url to access the document definition in the index (only es6 and higher)
  url = SearchIndexBackend.build_url(type: 'User', with_pipeline: false, with_document_type: true)

  # base url
  url = SearchIndexBackend.build_url

=end

  # rubocop:disable Metrics/ParameterLists
  def self.build_url(type: nil, action: nil, object_id: nil, with_pipeline: true, with_document_type: true, url_params: {})
    # rubocop:enable Metrics/ParameterLists
    return if !SearchIndexBackend.enabled?

    # set index
    index = build_index_name(type)

    # add pipeline if needed
    if index && with_pipeline == true
      url_pipeline = Setting.get('es_pipeline')
      if url_pipeline.present?
        url_params['pipeline'] = url_pipeline
      end
    end

    # prepare url params
    params_string = ''
    if url_params.present?
      params_string = "?#{URI.encode_www_form(url_params)}"
    end

    url = Setting.get('es_url')
    return "#{url}#{params_string}" if index.blank?

    # add type information
    url = "#{url}/#{index}"

    # add document type
    if with_document_type
      url = "#{url}/_doc"
    end

    # add action
    if action
      url = "#{url}/#{action}"
    end

    # add object id
    if object_id.present?
      url = "#{url}/#{object_id}"
    end

    "#{url}#{params_string}"
  end
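
  # Illustrative sketch (not part of the original source): assuming es_url is
  # 'http://localhost:9200', es_index is 'zammad', the environment is
  # production, and no ingest pipeline is configured:
  #
  #   build_url(type: 'User', object_id: 123, with_pipeline: false)
  #   # => "http://localhost:9200/zammad_production_user/_doc/123"
  #
  #   build_url(type: 'User', action: '_search', with_pipeline: false, with_document_type: true)
  #   # => "http://localhost:9200/zammad_production_user/_doc/_search"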

  def self.humanized_error(verb:, url:, response:, payload: nil)
    prefix = "Unable to process #{verb} request to elasticsearch URL '#{url}'."
    suffix = "\n\nResponse:\n#{response.inspect}\n\n"

    if payload.respond_to?(:to_json)
      suffix += "Payload:\n#{payload.to_json}"
      suffix += "\n\nPayload size: #{payload.to_json.bytesize / 1024 / 1024}M"
    else
      suffix += "Payload:\n#{payload.inspect}"
    end

    message = if response&.error&.match?('Connection refused')
                "Elasticsearch is not reachable, probably because it's not running or even installed."
              elsif url.end_with?('pipeline/zammad-attachment', 'pipeline=zammad-attachment') && response.code == 400
                'The installed attachment plugin could not handle the request payload. Ensure that the correct attachment plugin is installed (ingest-attachment).'
              else
                'Check the response and payload for detailed information: '
              end

    result = "#{prefix} #{message}#{suffix}"
    Rails.logger.error result.first(40_000)
    result
  end

  # add * on simple query like "somephrase23"
  def self.append_wildcard_to_simple_query(query)
    query.strip!
    query += '*' if query.exclude?(':')
    query
  end
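
  # Illustrative examples (not part of the original source):
  #
  #   append_wildcard_to_simple_query('smith')       # => "smith*"
  #   append_wildcard_to_simple_query('login:smith') # => "login:smith"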

=begin

@param condition [Hash] search condition
@param options [Hash] search options
@option options [Integer] :from
@option options [Integer] :limit
@option options [Hash] :query_extension applied to ElasticSearch query
@option options [Array<String>] :order_by ordering directions, desc or asc
@option options [Array<String>] :sort_by fields to sort by

=end

  DEFAULT_QUERY_OPTIONS = {
    from: 0,
    limit: 10
  }.freeze

  def self.build_query(condition, options = {})
    options = DEFAULT_QUERY_OPTIONS.merge(options.deep_symbolize_keys)

    data = {
      from: options[:from],
      size: options[:limit],
      sort: search_by_index_sort(options[:sort_by], options[:order_by]),
      query: {
        bool: {
          must: []
        }
      }
    }

    if (extension = options[:query_extension])
      data[:query].deep_merge! extension.deep_dup
    end

    data[:query][:bool][:must].push condition

    data
  end
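
  # Illustrative sketch (not part of the original source): with the defaults
  # above, a bare condition expands to:
  #
  #   build_query({ 'query_string' => { 'query' => 'smith*' } })
  #   # => {
  #   #      from: 0,
  #   #      size: 10,
  #   #      sort: [{ updated_at: { order: 'desc' } }, '_score'],
  #   #      query: { bool: { must: [{ 'query_string' => { 'query' => 'smith*' } }] } }
  #   #    }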

=begin

refreshes all indexes to make previous request data visible in future requests

  SearchIndexBackend.refresh

=end

  def self.refresh
    return if !enabled?

    url = "#{Setting.get('es_url')}/_all/_refresh"

    make_request_and_validate(url, method: :post)
  end

=begin

helper method for making HTTP calls

@param url [String] url
@option params [Hash] :data is a payload hash
@option params [Symbol] :method is the HTTP method
@option params [Integer] :open_timeout is the HTTP request open timeout
@option params [Integer] :read_timeout is the HTTP request read timeout

@return UserAgent response

=end

  def self.make_request(url, data: {}, method: :get, open_timeout: 8, read_timeout: 180)
    Rails.logger.debug { "# curl -X #{method} \"#{url}\" " }
    Rails.logger.debug { "-d '#{data.to_json}'" } if data.present?

    options = {
      json: true,
      open_timeout: open_timeout,
      read_timeout: read_timeout,
      total_timeout: (open_timeout + read_timeout + 60),
      open_socket_tries: 3,
      user: Setting.get('es_user'),
      password: Setting.get('es_password'),
    }

    response = UserAgent.send(method, url, data, options)

    Rails.logger.debug { "# #{response.code}" }

    response
  end

=begin

helper method for making HTTP calls and raising an error if the response was not successful

@param url [String] url
@option args [Hash] see {make_request}

@return [Boolean] always returns true. Raises an error if something went wrong.

=end

  def self.make_request_and_validate(url, **args)
    response = make_request(url, **args)

    return true if response.success?

    raise humanized_error(
      verb: args[:method],
      url: url,
      payload: args[:data],
      response: response
    )
  end

=begin

This function will return an index mapping based on the
attributes of the database table of the existing object.

mapping = SearchIndexBackend.get_mapping_properties_object(Ticket)

Returns:

mapping = {
  User: {
    properties: {
      firstname: {
        type: 'keyword',
      },
    }
  }
}

=end

  def self.get_mapping_properties_object(object)
    name = '_doc'
    result = {
      name => {
        properties: {}
      }
    }

    store_columns = %w[preferences data]

    # for elasticsearch 6.x and later
    string_type = 'text'
    string_raw = { type: 'keyword', ignore_above: 5012 }
    boolean_raw = { type: 'boolean' }

    object.columns_hash.each do |key, value|
      if value.type == :string && value.limit && value.limit <= 5000 && store_columns.exclude?(key)
        result[name][:properties][key] = {
          type: string_type,
          fields: {
            keyword: string_raw,
          }
        }
      elsif value.type == :integer
        result[name][:properties][key] = {
          type: 'integer',
        }
      elsif value.type == :datetime || value.type == :date
        result[name][:properties][key] = {
          type: 'date',
        }
      elsif value.type == :boolean
        result[name][:properties][key] = {
          type: 'boolean',
          fields: {
            keyword: boolean_raw,
          }
        }
      elsif value.type == :binary
        result[name][:properties][key] = {
          type: 'binary',
        }
      elsif value.type == :bigint
        result[name][:properties][key] = {
          type: 'long',
        }
      elsif value.type == :decimal
        result[name][:properties][key] = {
          type: 'float',
        }
      end
    end

    case object.name
    when 'Ticket'
      result[name][:_source] = {
        excludes: ['article.attachment']
      }
      result[name][:properties][:article] = {
        type: 'nested',
        include_in_parent: true,
      }
    when 'KnowledgeBase::Answer::Translation'
      result[name][:_source] = {
        excludes: ['attachment']
      }
    end

    return result if type_in_mapping?

    result[name]
  end

  # get es version
  def self.version
    @version ||= begin
      info = SearchIndexBackend.info
      number = nil
      if info.present?
        number = info['version']['number'].to_s
      end
      number
    end
  end

  def self.version_int
    number = version
    return 0 if !number

    number_split = version.split('.')
    "#{number_split[0]}#{format('%<minor>03d', minor: number_split[1])}#{format('%<patch>03d', patch: number_split[2])}".to_i
  end
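
  # Illustrative example (not part of the original source): the version string
  # is packed into an integer with three digits for each of the minor and
  # patch components:
  #
  #   version_int # => 7010002 for Elasticsearch '7.10.2'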

  def self.version_supported?

    # only versions greater/equal than 6.5.0 are supported
    return if version_int < 6_005_000

    true
  end

  # no type in mapping
  def self.type_in_mapping?
    return true if version_int < 7_000_000

    false
  end

  # is es configured?
  def self.configured?
    return false if Setting.get('es_url').blank?

    true
  end

  def self.settings
    {
      'index.mapping.total_fields.limit': 2000,
    }
  end

  def self.create_index(models = Models.indexable)
    models.each do |local_object|
      SearchIndexBackend.index(
        action: 'create',
        name: local_object.name,
        data: {
          mappings: SearchIndexBackend.get_mapping_properties_object(local_object),
          settings: SearchIndexBackend.settings,
        }
      )
    end
  end

  def self.drop_index(models = Models.indexable)
    models.each do |local_object|
      SearchIndexBackend.index(
        action: 'delete',
        name: local_object.name,
      )
    end
  end

  def self.create_object_index(object)
    models = Models.indexable.select { |c| c.to_s == object }
    create_index(models)
  end

  def self.drop_object_index(object)
    models = Models.indexable.select { |c| c.to_s == object }
    drop_index(models)
  end

  def self.pipeline(create: false)
    pipeline = Setting.get('es_pipeline')
    if create && pipeline.blank?
      pipeline = "zammad#{SecureRandom.uuid}"
      Setting.set('es_pipeline', pipeline)
    end
    pipeline
  end

  def self.pipeline_settings
    {
      ignore_failure: true,
      ignore_missing: true,
    }
  end

  def self.create_pipeline
    SearchIndexBackend.processors(
      "_ingest/pipeline/#{pipeline(create: true)}": [
        {
          action: 'delete',
        },
        {
          action: 'create',
          description: 'Extract zammad-attachment information from arrays',
          processors: [
            {
              foreach: {
                field: 'article',
                processor: {
                  foreach: {
                    field: '_ingest._value.attachment',
                    processor: {
                      attachment: {
                        target_field: '_ingest._value',
                        field: '_ingest._value._content',
                      }.merge(pipeline_settings),
                    }
                  }.merge(pipeline_settings),
                }
              }.merge(pipeline_settings),
            },
            {
              foreach: {
                field: 'attachment',
                processor: {
                  attachment: {
                    target_field: '_ingest._value',
                    field: '_ingest._value._content',
                  }.merge(pipeline_settings),
                }
              }.merge(pipeline_settings),
            }
          ]
        }
      ]
    )
  end

  def self.drop_pipeline
    return if pipeline.blank?

    SearchIndexBackend.processors(
      "_ingest/pipeline/#{pipeline}": [
        {
          action: 'delete',
        },
      ]
    )
  end

  # verifies the date ranges in an ElasticSearch payload
  #
  # @param url [String] of ElasticSearch
  # @param payload [Hash] Elasticsearch query payload
  #
  # @return [Boolean] or raises error
  def self.verify_date_range(url, payload)
    ranges_payload = payload.dig(:query, :bool, :must)

    return true if ranges_payload.nil?

    ranges = ranges_payload
             .select { |elem| elem.key? :range }
             .map { |elem| [elem[:range].keys.first, convert_es_date_range(elem)] }
             .each_with_object({}) { |elem, sum| (sum[elem.first] ||= []) << elem.last }

    return true if ranges.all? { |_, ranges_by_key| verify_single_key_range(ranges_by_key) }

    error_prefix = "Unable to process request to elasticsearch URL '#{url}'."
    error_suffix = "Payload:\n#{payload.to_json}"
    error_message = 'Conflicting date ranges'

    result = "#{error_prefix} #{error_message} #{error_suffix}"
    Rails.logger.error result.first(40_000)

    raise result
  end

  # checks whether all ranges are overlapping
  #
  # @param ranges [Array<Range<DateTime>>] to use in search
  #
  # @return [Boolean]
  def self.verify_single_key_range(ranges)
    ranges
      .each_with_index
      .all? do |range, i|
        ranges
          .slice((i + 1)..)
          .all? { |elem| elem.overlaps? range }
      end
  end
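
  # Illustrative example (not part of the original source): ranges that share
  # any point overlap, while disjoint ranges make the selector unsatisfiable
  # and are rejected by verify_date_range above:
  #
  #   a = DateTime.parse('2021-01-01')..DateTime.parse('2021-06-01')
  #   b = DateTime.parse('2021-05-01')..DateTime.parse('2021-12-31')
  #   verify_single_key_range([a, b]) # => true
  #
  #   c = DateTime.parse('2022-01-01')..DateTime.parse('2022-02-01')
  #   verify_single_key_range([a, c]) # => false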

  # Converts a payload component to a date range
  #
  # @param elem [Hash] payload component
  #
  # @return [Range<DateTime>]
  def self.convert_es_date_range(elem)
    range = elem[:range].first.last
    from = parse_es_range_date range[:from] || range[:gt] || '-9999-01-01'
    to = parse_es_range_date range[:to] || range[:lt] || '9999-01-01'

    from..to
  end

  # Parses an absolute date or converts a relative date
  #
  # @param input [String] string representation of a date
  #
  # @return [DateTime]
  def self.parse_es_range_date(input)
    match = input.match(%r{^now(-|\+)(\d+)(\w{1})$})

    return DateTime.parse input if !match

    map = {
      d: 'day',
      y: 'year',
      M: 'month',
      h: 'hour',
      m: 'minute',
    }

    range = match.captures[1].to_i.send map[match.captures[2].to_sym]

    case match.captures[0]
    when '-'
      range.ago
    when '+'
      range.from_now
    end
  end
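
  # Illustrative examples (not part of the original source):
  #
  #   parse_es_range_date('2015-01-01') # => Thu, 01 Jan 2015 00:00:00 +0000
  #   parse_es_range_date('now-12h')    # => 12 hours before the current time
  #   parse_es_range_date('now+1M')     # => 1 month from the current time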
end