2016-10-19 03:11:36 +00:00
# Copyright (C) 2012-2016 Zammad Foundation, http://zammad-foundation.org/
2014-01-27 22:59:41 +00:00
class SearchIndexBackend
=begin

info about the configured Elasticsearch server

  SearchIndexBackend.info

returns the parsed server info hash, or nil when no 'es_url' is configured.
Raises when the server is unreachable or its version is unsupported.

=end

def self.info
  url = Setting.get('es_url').to_s
  return if url.blank?

  response = make_request(url)

  if !response.success?
    raise humanized_error(
      verb:     'GET',
      url:      url,
      response: response,
    )
  end

  installed_version = response.data.dig('version', 'number')
  raise "Unable to get elasticsearch version from response: #{response.inspect}" if installed_version.blank?

  # supported range: newer than 2.3 and older than 8
  server_version = Gem::Version.new(installed_version)
  if server_version >= Gem::Version.new('8') || server_version <= Gem::Version.new('2.3')
    raise "Version #{installed_version} of configured elasticsearch is not supported."
  end

  response.data
end
=begin

update processors

  SearchIndexBackend.processors(
    _ingest/pipeline/attachment: {
      description: 'Extract attachment information from arrays',
      processors: [
        {
          foreach: {
            field: 'ticket.articles.attachments',
            processor: {
              attachment: {
                target_field: '_ingest._value.attachment',
                field: '_ingest._value.data'
              }
            }
          }
        }
      ]
    }
  )

=end

def self.processors(data)
  data.each do |pipeline, definitions|
    pipeline_url = "#{Setting.get('es_url')}/#{pipeline}"

    definitions.each do |definition|
      if definition[:action] == 'delete'
        response = make_request(pipeline_url, method: :delete)

        # a pipeline that is already gone (404) is fine when deleting
        unless response.success? || response.code.to_s == '404'
          raise humanized_error(
            verb:     'DELETE',
            url:      pipeline_url,
            response: response,
          )
        end
        next
      end

      # the :action key is bookkeeping only, not part of the ES payload
      definition.delete(:action)

      make_request_and_validate(pipeline_url, data: definition, method: :put)
    end
  end
  true
end
=begin

create/update/delete index

  SearchIndexBackend.index(
    action: 'create',  # create/update/delete
    name:   'Ticket',
    data:   {
      mappings: {
        Ticket: {
          properties: {
            articles: {
              type:       'nested',
              properties: {
                'attachment' => { type: 'attachment' }
              }
            }
          }
        }
      }
    }
  )

  SearchIndexBackend.index(
    action: 'delete',  # create/update/delete
    name:   'Ticket',
  )

=end

def self.index(data)
  url = build_url(type: data[:name], with_pipeline: false, with_document_type: false)
  return if url.blank?

  # deletion is delegated to .remove (nil :action never equals 'delete')
  return SearchIndexBackend.remove(data[:name]) if data[:action] == 'delete'

  make_request_and_validate(url, data: data[:data], method: :put)
end
=begin

add new object to search index

  SearchIndexBackend.add('Ticket', some_data_object)

=end

def self.add(type, data)
  # documents are stored under their record id
  url = build_url(type: type, object_id: data['id'])
  return if url.blank?

  make_request_and_validate(url, data: data, method: :post)
end
=begin

This function updates specific attributes of an index based on a query.

  data = {
    organization: {
      name: "Zammad Foundation"
    }
  }
  where = {
    organization_id: 1
  }
  SearchIndexBackend.update_by_query('Ticket', data, where)

=end

def self.update_by_query(type, data, where)
  return if data.blank? || where.blank?

  url = build_url(type: type, action: '_update_by_query', with_pipeline: false, with_document_type: false, url_params: { conflicts: 'proceed' })
  return if url.blank?

  # build one painless assignment per updated attribute
  assignments = data.keys.map { |attribute| "ctx._source.#{attribute}=params.#{attribute}" }

  payload = {
    script: {
      lang:   'painless',
      source: assignments.join(';'),
      params: data,
    },
    query:  {
      term: where,
    },
  }

  # bulk updates can take a while on large indices
  make_request_and_validate(url, data: payload, method: :post, read_timeout: 10.minutes)
end
=begin

remove whole data from index

  SearchIndexBackend.remove('Ticket', 123)

  SearchIndexBackend.remove('Ticket')

=end

def self.remove(type, o_id = nil)
  # a single document needs the document type in the URL, a whole index does not
  url = if o_id
          build_url(type: type, object_id: o_id, with_pipeline: false, with_document_type: true)
        else
          build_url(type: type, object_id: o_id, with_pipeline: false, with_document_type: false)
        end
  return if url.blank?

  response = make_request(url, method: :delete)

  return true if response.success?
  # treat 400 (e.g. index does not exist) as already removed
  return true if response.code.to_s == '400'

  # NOTE: renamed from `humanized_error` — the local previously shadowed
  # the class method of the same name
  error_message = humanized_error(
    verb:     'DELETE',
    url:      url,
    response: response,
  )
  Rails.logger.info "NOTICE: can't delete index: #{error_message}"
  false
end
=begin

@param query   [String]                search query
@param index   [String, Array<String>] indexes to search in (see search_by_index)
@param options [Hash]                  search options (see build_query)

@return search result

@example Sample queries

  result = SearchIndexBackend.search('search query', ['User', 'Organization'], limit: limit)

  result = SearchIndexBackend.search('search query', 'User', limit: limit)

  result = SearchIndexBackend.search('search query', 'User', limit: limit, sort_by: ['updated_at'], order_by: ['desc'])

  result = SearchIndexBackend.search('search query', 'User', limit: limit, sort_by: ['active', 'updated_at'], order_by: ['desc', 'desc'])

  result = [
    {
      id:   123,
      type: 'User',
    },
    {
      id:   125,
      type: 'User',
    },
    {
      id:   15,
      type: 'Organization',
    }
  ]

=end

def self.search(query, index, options = {})
  if index.is_a?(Array)
    per_index_results = index.map { |single_index| search_by_index(query, single_index, options) }
    # drop empty results, keep the per-hit hashes flat
    return per_index_results.compact.flatten(1)
  end

  search_by_index(query, index, options)
end
2018-11-06 16:11:10 +00:00
=begin

@param query   [String] search query
@param index   [String] index name
@param options [Hash]   search options (see build_query)

@return search result

=end

def self.search_by_index(query, index, options = {})
  return [] if query.blank?

  url = build_url(type: index, action: '_search', with_pipeline: false, with_document_type: true)
  return [] if url.blank?

  # per-index field restriction / highlighting (looked up once, used below)
  highlight_fields = options.dig(:highlight_fields_by_indexes, index.to_sym)

  # real search condition
  condition = {
    'query_string' => {
      'query'            => append_wildcard_to_simple_query(query),
      'default_operator' => 'AND',
      'analyze_wildcard' => true,
    }
  }
  condition['query_string']['fields'] = highlight_fields if highlight_fields

  query_data = build_query(condition, options)

  if highlight_fields
    query_data[:highlight] = { fields: highlight_fields.map { |field| [field, {}] }.to_h }
  end

  response = make_request(url, data: query_data)

  if !response.success?
    Rails.logger.error humanized_error(
      verb:     'GET',
      url:      url,
      payload:  query_data,
      response: response,
    )
    return []
  end

  hits = response.data&.dig('hits', 'hits')
  return [] if !hits

  hits.map do |hit|
    Rails.logger.info "... #{hit['_type']} #{hit['_id']}"

    result = {
      id:   hit['_id'],
      type: index,
    }
    result[:highlight] = hit['highlight'] if highlight_fields
    result
  end
end
2018-11-06 16:11:10 +00:00
# Build the ElasticSearch "sort" array from parallel field/direction lists.
#
# @param sort_by  [Array<String>, nil] fields to sort by
# @param order_by [Array<String>, nil] directions ('asc'/'desc'), matched by position
# @return [Array] ES sort definition; defaults to updated_at desc, always ends with '_score'
def self.search_by_index_sort(sort_by = nil, order_by = nil)
  sort_fields = []

  sort_by&.each_with_index do |field, position|
    direction = order_by&.at(position)
    next if field.blank?
    next if direction.blank?

    # for sorting values use .keyword values (no analyzer is used - plain values)
    if field !~ /\./ && field !~ /_(time|date|till|id|ids|at)$/
      field += '.keyword'
    end

    sort_fields.push(field => { order: direction })
  end

  # fall back to newest-first when no usable sorting was requested
  if sort_fields.blank?
    sort_fields.push(updated_at: { order: 'desc' })
  end

  sort_fields.push('_score')
  sort_fields
end
2014-02-02 18:58:31 +00:00
=begin

get count of tickets and tickets which match on selector

  result = SearchIndexBackend.selectors(index, selector)

example with a simple search:

  result = SearchIndexBackend.selectors('Ticket', { 'category' => { 'operator' => 'is', 'value' => 'aa::ab' } })

  result = [
    { id: 1, type: 'Ticket' },
    { id: 2, type: 'Ticket' },
    { id: 3, type: 'Ticket' },
  ]

you also can get aggregations

  result = SearchIndexBackend.selectors(index, selector, options, aggs_interval)

example for aggregations within one year

  aggs_interval = {
    from:     '2015-01-01',
    to:       '2015-12-31',
    interval: 'month', # year, quarter, month, week, day, hour, minute, second
    field:    'created_at',
  }

  options = {
    limit:        123,
    current_user: User.find(123),
  }

  result = SearchIndexBackend.selectors('Ticket', { 'category' => { 'operator' => 'is', 'value' => 'aa::ab' } }, options, aggs_interval)

  result = {
    hits: {
      total: 4819,
    },
    aggregations: {
      time_buckets: {
        buckets: [
          {
            key_as_string: "2014-10-01T00:00:00.000Z",
            key: 1412121600000,
            doc_count: 420
          },
          ...
        ]
      }
    }
  }

=end

def self.selectors(index, selectors = nil, options = {}, aggs_interval = nil)
  raise 'no selectors given' if !selectors

  url = build_url(type: index, action: '_search', with_pipeline: false, with_document_type: true)
  return if url.blank?

  data = selector2query(selectors, options, aggs_interval)

  response = make_request(url, data: data)

  if !response.success?
    raise humanized_error(
      verb:     'GET',
      url:      url,
      payload:  data,
      response: response,
    )
  end
  Rails.logger.debug { response.data.to_json }

  # without an aggregation interval, return the matching ids plus the total count
  if aggs_interval.blank? || aggs_interval[:interval].blank?
    ticket_ids = response.data['hits']['hits'].map { |hit| hit['_id'] }

    # in lower ES 6 versions, we get total count directly, in higher
    # versions we need to pick it from the total hash
    total = response.data['hits']['total']
    count = total.is_a?(Integer) ? total : total['value']

    return {
      count:      count,
      ticket_ids: ticket_ids,
    }
  end

  response.data
end
2019-03-26 00:17:17 +00:00
# default options applied by selector2query (see .selectors)
DEFAULT_SELECTOR_OPTIONS = {
  limit: 10
}.freeze

# Translate a Zammad condition selector into an ElasticSearch query body.
#
# @param selector      [Hash, nil] condition hash, e.g. { 'category' => { 'operator' => 'is', 'value' => 'x' } }
# @param options       [Hash]      :limit, :sort_by, :order_by
# @param aggs_interval [Hash, nil] optional date-histogram aggregation definition
# @return [Hash] ES query payload
# @raise [RuntimeError] on unknown operators or invalid relative ranges
def self.selector2query(selector, options, aggs_interval)
  # FIX: merge the selector defaults; previously DEFAULT_QUERY_OPTIONS was
  # merged here, leaving DEFAULT_SELECTOR_OPTIONS unused (both default
  # limit to 10 and :from is never read here, so behavior is unchanged)
  options = DEFAULT_SELECTOR_OPTIONS.merge(options.deep_symbolize_keys)

  query_must     = []
  query_must_not = []

  # suffixes for ES date math, e.g. "now-3d"
  relative_map = {
    day:    'd',
    year:   'y',
    month:  'M',
    hour:   'h',
    minute: 'm',
  }

  if selector.present?
    selector.each do |key, data|
      # strip the object prefix, e.g. "ticket.category" -> "category"
      key_tmp = key.sub(/^.+?\./, '')
      wildcard_or_term = 'term'
      if data['value'].is_a?(Array)
        wildcard_or_term = 'terms'
      end
      t = {}

      # use .keyword in case of compare exact values
      if data['operator'] == 'is' || data['operator'] == 'is not'
        if data['value'].is_a?(Array)
          data['value'].each do |value|
            next if !value.is_a?(String) || value !~ /[A-z]/

            key_tmp += '.keyword'
            break
          end
        elsif data['value'].is_a?(String) && /[A-z]/.match?(data['value'])
          key_tmp += '.keyword'
        end
      end

      # use .keyword and wildcard search in cases where query contains non A-z chars
      if data['operator'] == 'contains' || data['operator'] == 'contains not'
        if data['value'].is_a?(Array)
          data['value'].each_with_index do |value, index|
            next if !value.is_a?(String) || value !~ /[A-z]/ || value !~ /\W/

            data['value'][index] = "*#{value}*"
            key_tmp += '.keyword'
            wildcard_or_term = 'wildcards'
            break
          end
        elsif data['value'].is_a?(String) && /[A-z]/.match?(data['value']) && data['value'] =~ /\W/
          data['value'] = "*#{data['value']}*"
          key_tmp += '.keyword'
          wildcard_or_term = 'wildcard'
        end
      end

      # is/is not/contains/contains not
      if data['operator'] == 'is' || data['operator'] == 'is not' || data['operator'] == 'contains' || data['operator'] == 'contains not'
        t[wildcard_or_term] = {}
        t[wildcard_or_term][key_tmp] = data['value']
        if data['operator'] == 'is' || data['operator'] == 'contains'
          query_must.push t
        elsif data['operator'] == 'is not' || data['operator'] == 'contains not'
          query_must_not.push t
        end
      elsif data['operator'] == 'contains all' || data['operator'] == 'contains one' || data['operator'] == 'contains all not' || data['operator'] == 'contains one not'
        values = data['value'].split(',').map(&:strip)
        t[:query_string] = {}
        if data['operator'] == 'contains all'
          t[:query_string][:query] = "#{key_tmp}:\"#{values.join('" AND "')}\""
          query_must.push t
        elsif data['operator'] == 'contains one not'
          t[:query_string][:query] = "#{key_tmp}:\"#{values.join('" OR "')}\""
          query_must_not.push t
        elsif data['operator'] == 'contains one'
          t[:query_string][:query] = "#{key_tmp}:\"#{values.join('" OR "')}\""
          query_must.push t
        elsif data['operator'] == 'contains all not'
          t[:query_string][:query] = "#{key_tmp}:\"#{values.join('" AND "')}\""
          query_must_not.push t
        end

      # within last/within next (relative)
      elsif data['operator'] == 'within last (relative)' || data['operator'] == 'within next (relative)'
        range = relative_map[data['range'].to_sym]
        if range.blank?
          raise "Invalid relative_map for range '#{data['range']}'."
        end

        t[:range] = {}
        t[:range][key_tmp] = {}
        if data['operator'] == 'within last (relative)'
          t[:range][key_tmp][:gte] = "now-#{data['value']}#{range}"
        else
          t[:range][key_tmp][:lt] = "now+#{data['value']}#{range}"
        end
        query_must.push t

      # before/after (relative)
      elsif data['operator'] == 'before (relative)' || data['operator'] == 'after (relative)'
        range = relative_map[data['range'].to_sym]
        if range.blank?
          raise "Invalid relative_map for range '#{data['range']}'."
        end

        t[:range] = {}
        t[:range][key_tmp] = {}
        if data['operator'] == 'before (relative)'
          t[:range][key_tmp][:lt] = "now-#{data['value']}#{range}"
        else
          t[:range][key_tmp][:gt] = "now+#{data['value']}#{range}"
        end
        query_must.push t

      # before/after (absolute)
      elsif data['operator'] == 'before (absolute)' || data['operator'] == 'after (absolute)'
        t[:range] = {}
        t[:range][key_tmp] = {}
        if data['operator'] == 'before (absolute)'
          t[:range][key_tmp][:lt] = (data['value'])
        else
          t[:range][key_tmp][:gt] = (data['value'])
        end
        query_must.push t
      else
        raise "unknown operator '#{data['operator']}' for #{key}"
      end
    end
  end

  data = {
    query: {},
    size:  options[:limit],
  }

  # add aggs to filter
  if aggs_interval.present?
    if aggs_interval[:interval].present?
      # pure aggregation: no hits needed
      data[:size] = 0
      data[:aggs] = {
        time_buckets: {
          date_histogram: {
            field:    aggs_interval[:field],
            interval: aggs_interval[:interval],
          }
        }
      }
      if aggs_interval[:timezone].present?
        data[:aggs][:time_buckets][:date_histogram][:time_zone] = aggs_interval[:timezone]
      end
    end
    r = {}
    r[:range] = {}
    r[:range][aggs_interval[:field]] = {
      from: aggs_interval[:from],
      to:   aggs_interval[:to],
    }
    query_must.push r
  end

  data[:query][:bool] ||= {}

  if query_must.present?
    data[:query][:bool][:must] = query_must
  end
  if query_must_not.present?
    data[:query][:bool][:must_not] = query_must_not
  end

  # add sort
  if aggs_interval.present? && aggs_interval[:field].present? && aggs_interval[:interval].blank?
    sort = []
    sort[0] = {}
    sort[0][aggs_interval[:field]] = {
      order: 'desc'
    }
    sort[1] = '_score'
    data['sort'] = sort
  else
    data['sort'] = search_by_index_sort(options[:sort_by], options[:order_by])
  end

  data
end
2015-10-20 08:48:43 +00:00
=begin

return true if backend is configured

  result = SearchIndexBackend.enabled?

=end

def self.enabled?
  # the backend is usable as soon as an Elasticsearch URL is configured
  Setting.get('es_url').present?
end
2020-02-18 14:49:52 +00:00
# Build the (environment-scoped) index name, optionally for a given type.
#
# @param index [String, nil] e.g. 'Ticket'
# @return [String] e.g. 'zammad_production' or 'zammad_production_ticket'
def self.build_index_name(index = nil)
  base_name = "#{Setting.get('es_index')}_#{Rails.env}"
  return base_name if index.blank?

  # ES <= 5.6: one index with multiple types; newer ES: one index per type
  return "#{base_name}/#{index}" if lower_equal_es56?

  "#{base_name}_#{index.underscore.tr('/', '_')}"
end
2019-09-26 11:41:20 +00:00
=begin

return true if the elastic search version is lower equal 5.6

  result = SearchIndexBackend.lower_equal_es56?

returns

  result = true

=end

def self.lower_equal_es56?
  # the 'es_multi_index' setting being explicitly false marks a server
  # without multi-index support (NOTE: nil would NOT count as old here)
  Setting.get('es_multi_index') == false
end
=begin

generate url for index or document access (only for internal use)

  # url to access single document in index (in case with_pipeline or not)
  url = SearchIndexBackend.build_url(type: 'User', object_id: 123, with_pipeline: true)

  # url to access whole index
  url = SearchIndexBackend.build_url(type: 'User')

  # url to access document definition in index (only es6 and higher)
  url = SearchIndexBackend.build_url(type: 'User', with_pipeline: false, with_document_type: true)

  # base url
  url = SearchIndexBackend.build_url

=end

# rubocop:disable Metrics/ParameterLists
def self.build_url(type: nil, action: nil, object_id: nil, with_pipeline: true, with_document_type: true, url_params: {})
  # rubocop:enable Metrics/ParameterLists
  return if !SearchIndexBackend.enabled?

  # set index
  index = build_index_name(type)

  # FIX: work on a copy so the caller's url_params hash is not modified
  # when the pipeline parameter is injected below
  url_params = url_params.dup

  # add pipeline if needed
  if index && with_pipeline == true
    url_pipline = Setting.get('es_pipeline')
    if url_pipline.present?
      url_params['pipeline'] = url_pipline
    end
  end

  # prepare url params
  params_string = ''
  if url_params.present?
    params_string = '?' + url_params.map { |key, value| "#{key}=#{value}" }.join('&')
  end

  url = Setting.get('es_url')
  return "#{url}#{params_string}" if index.blank?

  # add type information
  url = "#{url}/#{index}"

  # add document type
  if with_document_type && !lower_equal_es56?
    url = "#{url}/_doc"
  end

  # add action
  if action
    url = "#{url}/#{action}"
  end

  # add object id
  if object_id.present?
    url = "#{url}/#{object_id}"
  end

  "#{url}#{params_string}"
end
2018-03-08 15:30:07 +00:00
# Build (and log) a human-readable error message for a failed ES request.
#
# @param verb     [String, Symbol] HTTP verb used
# @param url      [String]        request URL
# @param payload  [Object, nil]   request payload, if any
# @param response [Object]        UserAgent response
# @return [String] the full error message
def self.humanized_error(verb:, url:, payload: nil, response:)
  intro   = "Unable to process #{verb} request to elasticsearch URL '#{url}'."
  details = "\n\nResponse:\n#{response.inspect}\n\nPayload:\n#{payload.inspect}"
  if payload.respond_to?(:to_json)
    details += "\n\nPayload size: #{payload.to_json.bytesize / 1024 / 1024}M"
  end

  hint = if response&.error&.match?('Connection refused')
           "Elasticsearch is not reachable, probably because it's not running or even installed."
         elsif url.end_with?('pipeline/zammad-attachment', 'pipeline=zammad-attachment') && response.code == 400
           'The installed attachment plugin could not handle the request payload. Ensure that the correct attachment plugin is installed (5.6 => ingest-attachment, 2.4 - 5.5 => mapper-attachments).'
         else
           'Check the response and payload for detailed information: '
         end

  result = "#{intro} #{hint}#{details}"
  # cap the logged message, payload dumps can be huge
  Rails.logger.error result.first(40_000)
  result
end
2018-07-03 07:18:55 +00:00
2018-10-02 11:50:50 +00:00
# Append "*" to a simple query like "somephrase23" so prefix matches are
# found too; queries already using field syntax (containing ":") are left
# untouched.
#
# @param query [String] raw search query
# @return [String] trimmed query, with trailing wildcard when applicable
def self.append_wildcard_to_simple_query(query)
  # FIX: use the non-destructive strip — the previous query.strip! mutated
  # the caller's string argument in place
  query = query.strip
  query += '*' if !query.match?(/:/)
  query
end
2018-11-06 16:11:10 +00:00
=begin

@param condition [Hash] search condition
@param options [Hash] search options
@option options [Integer] :from
@option options [Integer] :limit
@option options [Hash] :query_extension applied to ElasticSearch query
@option options [Array<String>] :order_by ordering directions, desc or asc
@option options [Array<String>] :sort_by fields to sort by

=end

DEFAULT_QUERY_OPTIONS = {
  from:  0,
  limit: 10
}.freeze

def self.build_query(condition, options = {})
  options = DEFAULT_QUERY_OPTIONS.merge(options.deep_symbolize_keys)

  data = {
    from:  options[:from],
    size:  options[:limit],
    sort:  search_by_index_sort(options[:sort_by], options[:order_by]),
    query: { bool: { must: [] } },
  }

  # allow callers to extend the generated query
  extension = options[:query_extension]
  data[:query].deep_merge!(extension.deep_dup) if extension

  data[:query][:bool][:must].push condition
  data
end
2019-06-20 10:45:27 +00:00
2019-09-24 13:04:42 +00:00
=begin

refreshes all indexes to make previous request data visible in future requests

  SearchIndexBackend.refresh

=end

def self.refresh
  return if !enabled?

  make_request_and_validate("#{Setting.get('es_url')}/_all/_refresh", method: :post)
end
=begin

helper method for making HTTP calls

@param url [String] url
@option params [Hash] :data is a payload hash
@option params [Symbol] :method is a HTTP method
@option params [Integer] :open_timeout is HTTP request open timeout
@option params [Integer] :read_timeout is HTTP request read timeout

@return UserAgent response

=end

def self.make_request(url, data: {}, method: :get, open_timeout: 8, read_timeout: 180)
  # log the request as a reproducible curl command
  Rails.logger.info "# curl -X #{method} \"#{url}\" "
  Rails.logger.debug { "-d '#{data.to_json}'" } if data.present?

  request_options = {
    json:              true,
    open_timeout:      open_timeout,
    read_timeout:      read_timeout,
    total_timeout:     (open_timeout + read_timeout + 60),
    open_socket_tries: 3,
    user:              Setting.get('es_user'),
    password:          Setting.get('es_password'),
  }

  response = UserAgent.send(method, url, data, request_options)

  Rails.logger.info "# #{response.code}"

  response
end
=begin

helper method for making HTTP calls and raising error if response was not success

@param url [String] url
@option args [Hash] see {make_request}

@return [Boolean] always returns true. Raises error if something went wrong.

=end

def self.make_request_and_validate(url, **args)
  # FIX: forward the captured keywords with ** — passing the hash
  # positionally (make_request(url, args)) breaks under Ruby 3's
  # keyword-argument separation
  response = make_request(url, **args)

  return true if response.success?

  raise humanized_error(
    verb:     args[:method],
    url:      url,
    payload:  args[:data],
    response: response
  )
end
2015-04-27 14:15:29 +00:00
end