2016-10-19 03:11:36 +00:00
# Copyright (C) 2012-2016 Zammad Foundation, http://zammad-foundation.org/
2014-01-27 22:59:41 +00:00
class SearchIndexBackend
=begin

info about used search index machine

  SearchIndexBackend.info

=end

# Fetches the Elasticsearch server info and verifies that the installed
# version is supported (newer than 2.3 and older than 8).
#
# @return [Hash, nil] parsed info payload, or nil when no es_url is configured
# @raise [RuntimeError] when the version is missing, unsupported, or the request fails
def self.info
  url = Setting.get('es_url').to_s
  return if url.blank?

  Rails.logger.info "# curl -X GET \"#{url}\""

  response = UserAgent.get(
    url,
    {},
    {
      json:         true,
      open_timeout: 8,
      read_timeout: 12,
      user:         Setting.get('es_user'),
      password:     Setting.get('es_password'),
    }
  )
  Rails.logger.info "# #{response.code}"

  if !response.success?
    raise humanized_error(
      verb:     'GET',
      url:      url,
      response: response,
    )
  end

  installed_version = response.data.dig('version', 'number')
  raise "Unable to get elasticsearch version from response: #{response.inspect}" if installed_version.blank?

  # supported range: > 2.3 and < 8
  installed = Gem::Version.new(installed_version)
  if installed >= Gem::Version.new('8') || installed <= Gem::Version.new('2.3')
    raise "Version #{installed_version} of configured elasticsearch is not supported."
  end

  response.data
end
=begin

update processors

  SearchIndexBackend.processors(
    _ingest/pipeline/attachment: {
      description: 'Extract attachment information from arrays',
      processors: [
        {
          foreach: {
            field: 'ticket.articles.attachments',
            processor: {
              attachment: {
                target_field: '_ingest._value.attachment',
                field: '_ingest._value.data'
              }
            }
          }
        }
      ]
    }
  )

=end

# Creates/updates or deletes ingest pipelines in Elasticsearch.
#
# @param data [Hash{String=>Array<Hash>}] pipeline path => payload items;
#   an item with action: 'delete' removes the pipeline, all others are PUT
# @return [true]
# @raise [RuntimeError] when Elasticsearch rejects a request
def self.processors(data)
  data.each do |key, items|
    url = "#{Setting.get('es_url')}/#{key}"

    items.each do |item|
      if item[:action] == 'delete'
        Rails.logger.info "# curl -X DELETE \"#{url}\""
        response = UserAgent.delete(
          url,
          {
            json:         true,
            open_timeout: 8,
            read_timeout: 12,
            user:         Setting.get('es_user'),
            password:     Setting.get('es_password'),
          }
        )
        Rails.logger.info "# #{response.code}"
        # an already-missing pipeline (404) is fine when deleting;
        # delete items never fall through to the PUT below
        next if response.success?
        next if response.code.to_s == '404'

        raise humanized_error(
          verb:     'DELETE',
          url:      url,
          response: response,
        )
      end

      Rails.logger.info "# curl -X PUT \"#{url}\" \\"
      # log the item that is actually sent (previously logged the whole data hash)
      Rails.logger.debug { "-d '#{item.to_json}'" }

      item.delete(:action)
      response = UserAgent.put(
        url,
        item,
        {
          json:         true,
          open_timeout: 8,
          read_timeout: 12,
          user:         Setting.get('es_user'),
          password:     Setting.get('es_password'),
        }
      )
      Rails.logger.info "# #{response.code}"
      next if response.success?

      raise humanized_error(
        verb:     'PUT',
        url:      url,
        payload:  item,
        response: response,
      )
    end
  end
  true
end
=begin

create/update/delete index

  SearchIndexBackend.index(
    :action => 'create',  # create/update/delete
    :name   => 'Ticket',
    :data   => {
      :mappings => {
        :Ticket => {
          :properties => {
            :articles => {
              :type       => 'nested',
              :properties => {
                'attachment' => { :type => 'attachment' }
              }
            }
          }
        }
      }
    }
  )

  SearchIndexBackend.index(
    :action => 'delete', # create/update/delete
    :name   => 'Ticket',
  )

=end

# Creates/updates (PUT) or deletes an index.
#
# @param data [Hash] :action ('create'/'update'/'delete'), :name, :data
# @return [true, false, nil] nil if the backend is not configured
# @raise [RuntimeError] when a create/update request fails
def self.index(data)
  url = build_url(data[:name], nil, false, false)
  return if url.blank?

  return SearchIndexBackend.remove(data[:name]) if data[:action] && data[:action] == 'delete'

  Rails.logger.info "# curl -X PUT \"#{url}\" \\"
  Rails.logger.debug { "-d '#{data[:data].to_json}'" }

  # note that we use a high read timeout here because
  # otherwise the request will be retried (underhand)
  # which leads to an "index_already_exists_exception"
  # HTTP 400 status error
  # see: https://github.com/ankane/the-ultimate-guide-to-ruby-timeouts/issues/8
  # Improving the Elasticsearch config is probably the proper solution
  response = UserAgent.put(
    url,
    data[:data],
    {
      json:         true,
      open_timeout: 8,
      read_timeout: 30,
      user:         Setting.get('es_user'),
      password:     Setting.get('es_password'),
    }
  )
  Rails.logger.info "# #{response.code}"

  return true if response.success?

  raise humanized_error(
    verb:     'PUT',
    url:      url,
    payload:  data[:data],
    response: response,
  )
end
=begin

add new object to search index

  SearchIndexBackend.add('Ticket', some_data_object)

=end

# Indexes (POST) a single record.
#
# @param type [String] model name, e.g. 'Ticket'
# @param data [Hash] record attributes; data['id'] becomes the document id
# @return [true, nil] true on success, nil if the backend is not configured
# @raise [RuntimeError] when Elasticsearch rejects the document
def self.add(type, data)
  url = build_url(type, data['id'])
  return if url.blank?

  Rails.logger.info "# curl -X POST \"#{url}\" \\"
  Rails.logger.debug { "-d '#{data.to_json}'" }

  response = UserAgent.post(
    url,
    data,
    {
      json:         true,
      open_timeout: 8,
      read_timeout: 16,
      user:         Setting.get('es_user'),
      password:     Setting.get('es_password'),
    }
  )
  Rails.logger.info "# #{response.code}"

  if !response.success?
    raise humanized_error(
      verb:     'POST',
      url:      url,
      payload:  data,
      response: response,
    )
  end

  true
end
=begin

remove whole data from index

  SearchIndexBackend.remove('Ticket', 123)

  SearchIndexBackend.remove('Ticket')

=end

# Deletes a single document (when o_id is given) or a whole index.
# Failures are logged, never raised.
#
# @param type [String] model name, e.g. 'Ticket'
# @param o_id [Integer, nil] record id, or nil to drop the whole index
# @return [Boolean, nil] nil if the backend is not configured
def self.remove(type, o_id = nil)
  url = build_url(type, o_id, false, false)
  return if url.blank?

  Rails.logger.info "# curl -X DELETE \"#{url}\""

  response = UserAgent.delete(
    url,
    {
      open_timeout: 8,
      read_timeout: 16,
      user:         Setting.get('es_user'),
      password:     Setting.get('es_password'),
    }
  )
  Rails.logger.info "# #{response.code}"

  return true if response.success?
  # treat HTTP 400 as success as well (e.g. index does not exist)
  return true if response.code.to_s == '400'

  message = humanized_error(
    verb:     'DELETE',
    url:      url,
    response: response,
  )
  Rails.logger.info "NOTICE: can't delete index: #{message}"
  false
end
=begin

@param query   [String] search query
@param index   [String, Array<String>] indexes to search in (see search_by_index)
@param options [Hash] search options (see build_query)

@return search result

@example Sample queries

  result = SearchIndexBackend.search('search query', ['User', 'Organization'], limit: limit)
  result = SearchIndexBackend.search('search query', 'User', limit: limit)
  result = SearchIndexBackend.search('search query', 'User', limit: limit, sort_by: ['updated_at'], order_by: ['desc'])

  result = [
    {
      :id   => 123,
      :type => 'User',
    },
    {
      :id   => 125,
      :type => 'User',
    },
    {
      :id   => 15,
      :type => 'Organization',
    }
  ]

=end

# Dispatches a search over one index (String) or several (Array),
# merging the per-index result lists into one flat list.
def self.search(query, index, options = {})
  return search_by_index(query, index, options) if !index.is_a?(Array)

  per_index_results = index.map { |local_index| search_by_index(query, local_index, options) }
  per_index_results.compact.flatten(1)
end
2018-11-06 16:11:10 +00:00
=begin

@param query   [String] search query
@param index   [String] index name
@param options [Hash] search options (see build_query)

@return search result

=end

# Runs a query against a single index and maps the Elasticsearch hits
# to { id:, type: } result hashes (plus :highlight when requested).
#
# @return [Array<Hash>, nil] [] on empty query or error, nil if not configured
def self.search_by_index(query, index, options = {})
  return [] if query.blank?

  url = build_url
  return if url.blank?

  url += build_search_url(index)

  # real search condition
  condition = {
    'query_string' => {
      'query'            => append_wildcard_to_simple_query(query),
      'default_operator' => 'AND',
      'analyze_wildcard' => true,
    }
  }

  # look up the highlight fields once (was computed three times before)
  highlight_fields = options.dig(:highlight_fields_by_indexes, index.to_sym)
  if highlight_fields
    condition['query_string']['fields'] = highlight_fields
  end

  query_data = build_query(condition, options)

  if highlight_fields
    fields_for_highlight = highlight_fields.each_with_object({}) { |elem, memo| memo[elem] = {} }
    query_data[:highlight] = { fields: fields_for_highlight }
  end

  Rails.logger.info "# curl -X POST \"#{url}\" \\"
  Rails.logger.debug { "-d'#{query_data.to_json}'" }

  response = UserAgent.get(
    url,
    query_data,
    {
      json:         true,
      open_timeout: 5,
      read_timeout: 14,
      user:         Setting.get('es_user'),
      password:     Setting.get('es_password'),
    }
  )
  Rails.logger.info "# #{response.code}"

  if !response.success?
    # search errors are logged, not raised - callers get an empty result
    Rails.logger.error humanized_error(
      verb:     'GET',
      url:      url,
      payload:  query_data,
      response: response,
    )
    return []
  end

  data = response.data&.dig('hits', 'hits')
  return [] if !data

  data.map do |item|
    Rails.logger.info "... #{item['_type']} #{item['_id']}"

    output = {
      id:   item['_id'],
      type: index,
    }
    if highlight_fields
      output[:highlight] = item['highlight']
    end
    output
  end
end
2018-11-06 16:11:10 +00:00
# Builds the Elasticsearch sort definition from the parallel
# sort_by/order_by arrays. Falls back to updated_at DESC when no usable
# pair is given; '_score' is always appended as the final tie-breaker.
#
# @param sort_by  [Array<String>, nil] field names
# @param order_by [Array<String>, nil] 'asc'/'desc' per field
# @return [Array] ES-compatible sort clauses
def self.search_by_index_sort(sort_by = nil, order_by = nil)
  result = []

  Array(sort_by).each_with_index do |field, position|
    direction = order_by&.at(position)
    next if field.blank?
    next if direction.blank?

    # for sorting values use .raw values (no analyzer is used - plain values)
    if field !~ /\./ && field !~ /_(time|date|till|id|ids|at)$/
      field += '.raw'
    end
    result.push(field => { order: direction })
  end

  if result.blank?
    result.push(updated_at: { order: 'desc' })
  end

  result.push('_score')
  result
end
2014-02-02 18:58:31 +00:00
=begin

get count of tickets and tickets which match on selector

  result = SearchIndexBackend.selectors(index, selector)

example with a simple search:

  result = SearchIndexBackend.selectors('Ticket', { 'category' => { 'operator' => 'is', 'value' => 'aa::ab' } })

  result = [
    { id: 1, type: 'Ticket' },
    { id: 2, type: 'Ticket' },
    { id: 3, type: 'Ticket' },
  ]

you also can get aggregations

  result = SearchIndexBackend.selectors(index, selector, options, aggs_interval)

example for aggregations within one year

  aggs_interval = {
    from: '2015-01-01',
    to: '2015-12-31',
    interval: 'month', # year, quarter, month, week, day, hour, minute, second
    field: 'created_at',
  }

  options = {
    limit: 123,
    current_user: User.find(123),
  }

  result = SearchIndexBackend.selectors('Ticket', { 'category' => { 'operator' => 'is', 'value' => 'aa::ab' } }, options, aggs_interval)

  result = {
    hits: {
      total: 4819,
    },
    aggregations: {
      time_buckets: {
        buckets: [
          {
            key_as_string: "2014-10-01T00:00:00.000Z",
            key: 1412121600000,
            doc_count: 420
          },
          {
            key_as_string: "2014-11-01T00:00:00.000Z",
            key: 1414800000000,
            doc_count: 561
          },
          ...
        ]
      }
    }
  }

=end

def self.selectors(index, selectors = nil, options = {}, aggs_interval = nil)
  raise 'no selectors given' if !selectors

  url = build_url(nil, nil, false, false)
  return if url.blank?

  url += build_search_url(index)

  data = selector2query(selectors, options, aggs_interval)

  Rails.logger.info "# curl -X POST \"#{url}\" \\"
  Rails.logger.debug { "-d'#{data.to_json}'" }

  response = UserAgent.get(
    url,
    data,
    {
      json:         true,
      open_timeout: 5,
      read_timeout: 14,
      user:         Setting.get('es_user'),
      password:     Setting.get('es_password'),
    }
  )
  Rails.logger.info "# #{response.code}"

  if !response.success?
    raise humanized_error(
      verb:     'GET',
      url:      url,
      payload:  data,
      response: response,
    )
  end
  Rails.logger.debug { response.data.to_json }

  # without an aggregation interval only count and matching ids are returned
  if aggs_interval.blank? || aggs_interval[:interval].blank?
    ticket_ids = response.data['hits']['hits'].map { |item| item['_id'] }
    return {
      count:      response.data['hits']['total'],
      ticket_ids: ticket_ids,
    }
  end

  response.data
end
2019-03-26 00:17:17 +00:00
DEFAULT_SELECTOR_OPTIONS = {
  limit: 10
}.freeze

# Translates a Zammad selector hash into an Elasticsearch query body.
#
# @param selector [Hash] condition per field,
#   e.g. { 'category' => { 'operator' => 'is', 'value' => 'aa::ab' } }
# @param options [Hash] :limit (defaults come from DEFAULT_SELECTOR_OPTIONS)
# @param aggs_interval [Hash, nil] optional date-histogram aggregation settings
# @return [Hash] request payload for the _search endpoint
# @raise [RuntimeError] on unknown operators or invalid relative ranges
def self.selector2query(selector, options, aggs_interval)
  # was merging DEFAULT_QUERY_OPTIONS (build_query's defaults, including an
  # unused :from); use the selector-specific defaults defined above instead
  options = DEFAULT_SELECTOR_OPTIONS.merge(options.deep_symbolize_keys)

  query_must     = []
  query_must_not = []

  # Zammad range units => Elasticsearch date-math suffixes
  relative_map = {
    day:    'd',
    year:   'y',
    month:  'M',
    hour:   'h',
    minute: 'm',
  }

  if selector.present?
    selector.each do |key, data|
      key_tmp = key.sub(/^.+?\./, '')
      t = {}

      # use .raw in cases where query contains ::
      if data['value'].is_a?(Array)
        data['value'].each do |value|
          if value.is_a?(String) && value =~ /::/
            key_tmp += '.raw'
            break
          end
        end
      elsif data['value'].is_a?(String)
        if /::/.match?(data['value'])
          key_tmp += '.raw'
        end
      end

      # is/is not/contains/contains not
      if data['operator'] == 'is' || data['operator'] == 'is not' || data['operator'] == 'contains' || data['operator'] == 'contains not'
        if data['value'].is_a?(Array)
          t[:terms] = {}
          t[:terms][key_tmp] = data['value']
        else
          t[:term] = {}
          t[:term][key_tmp] = data['value']
        end
        if data['operator'] == 'is' || data['operator'] == 'contains'
          query_must.push t
        elsif data['operator'] == 'is not' || data['operator'] == 'contains not'
          query_must_not.push t
        end
      elsif data['operator'] == 'contains all' || data['operator'] == 'contains one' || data['operator'] == 'contains all not' || data['operator'] == 'contains one not'
        values = data['value'].split(',').map(&:strip)
        t[:query_string] = {}
        if data['operator'] == 'contains all'
          t[:query_string][:query] = "#{key_tmp}:\"#{values.join('" AND "')}\""
          query_must.push t
        elsif data['operator'] == 'contains one not'
          t[:query_string][:query] = "#{key_tmp}:\"#{values.join('" OR "')}\""
          query_must_not.push t
        elsif data['operator'] == 'contains one'
          t[:query_string][:query] = "#{key_tmp}:\"#{values.join('" OR "')}\""
          query_must.push t
        elsif data['operator'] == 'contains all not'
          t[:query_string][:query] = "#{key_tmp}:\"#{values.join('" AND "')}\""
          query_must_not.push t
        end

      # within last/within next (relative)
      elsif data['operator'] == 'within last (relative)' || data['operator'] == 'within next (relative)'
        range = relative_map[data['range'].to_sym]
        if range.blank?
          raise "Invalid relative_map for range '#{data['range']}'."
        end

        t[:range] = {}
        t[:range][key_tmp] = {}
        if data['operator'] == 'within last (relative)'
          t[:range][key_tmp][:gte] = "now-#{data['value']}#{range}"
        else
          t[:range][key_tmp][:lt] = "now+#{data['value']}#{range}"
        end
        query_must.push t

      # before/after (relative)
      elsif data['operator'] == 'before (relative)' || data['operator'] == 'after (relative)'
        range = relative_map[data['range'].to_sym]
        if range.blank?
          raise "Invalid relative_map for range '#{data['range']}'."
        end

        t[:range] = {}
        t[:range][key_tmp] = {}
        if data['operator'] == 'before (relative)'
          t[:range][key_tmp][:lt] = "now-#{data['value']}#{range}"
        else
          t[:range][key_tmp][:gt] = "now+#{data['value']}#{range}"
        end
        query_must.push t

      # before/after (absolute)
      elsif data['operator'] == 'before (absolute)' || data['operator'] == 'after (absolute)'
        t[:range] = {}
        t[:range][key_tmp] = {}
        if data['operator'] == 'before (absolute)'
          t[:range][key_tmp][:lt] = (data['value'])
        else
          t[:range][key_tmp][:gt] = (data['value'])
        end
        query_must.push t
      else
        raise "unknown operator '#{data['operator']}' for #{key}"
      end
    end
  end

  data = {
    query: {},
    size:  options[:limit],
  }

  # add aggs to filter
  if aggs_interval.present?
    if aggs_interval[:interval].present?
      # pure aggregation query - no hits needed
      data[:size] = 0
      data[:aggs] = {
        time_buckets: {
          date_histogram: {
            field:    aggs_interval[:field],
            interval: aggs_interval[:interval],
          }
        }
      }
      if aggs_interval[:timezone].present?
        data[:aggs][:time_buckets][:date_histogram][:time_zone] = aggs_interval[:timezone]
      end
    end
    r = {}
    r[:range] = {}
    r[:range][aggs_interval[:field]] = {
      from: aggs_interval[:from],
      to:   aggs_interval[:to],
    }
    query_must.push r
  end

  data[:query][:bool] ||= {}

  if query_must.present?
    data[:query][:bool][:must] = query_must
  end
  if query_must_not.present?
    data[:query][:bool][:must_not] = query_must_not
  end

  # add sort
  if aggs_interval.present? && aggs_interval[:field].present? && aggs_interval[:interval].blank?
    sort = []
    sort[0] = {}
    sort[0][aggs_interval[:field]] = {
      order: 'desc'
    }
    sort[1] = '_score'
    data['sort'] = sort
  end

  data
end
2015-10-20 08:48:43 +00:00
=begin

return true if backend is configured

  result = SearchIndexBackend.enabled?

=end

# @return [Boolean] whether an Elasticsearch URL is configured
def self.enabled?
  Setting.get('es_url').present?
end
2019-06-20 10:45:27 +00:00
# Composes the per-model index name: <es_index>_<Rails.env>_<underscored model>.
#
# @param index [String] model name, e.g. 'Ticket' or 'Cti/Log'
def self.build_index_name(index)
  base = "#{Setting.get('es_index')}_#{Rails.env}"
  "#{base}_#{index.underscore.tr('/', '_')}"
end
# Builds the Elasticsearch URL for a record, an index or the bare server.
#
# @param type      [String, nil]  model/index name (e.g. 'Ticket')
# @param o_id      [Integer, nil] record id
# @param pipeline  [Boolean] append the configured ingest pipeline (ES 6+ path only)
# @param with_type [Boolean] include the _doc type segment (ES 6+ path only)
# @return [String, nil] nil when the backend is not configured
def self.build_url(type = nil, o_id = nil, pipeline = true, with_type = true)
  return if !SearchIndexBackend.enabled?

  url = Setting.get('es_url')

  # for elasticsearch 5.6 and lower: one shared index with types
  if Setting.get('es_multi_index') == false
    index = "#{Setting.get('es_index')}_#{Rails.env}"
    return "#{url}/#{index}" if !type

    pipeline_param = Setting.get('es_pipeline')
    if pipeline_param.present?
      pipeline_param = "?pipeline=#{pipeline_param}"
    end
    return "#{url}/#{index}/#{type}/#{o_id}#{pipeline_param}" if o_id

    return "#{url}/#{index}/#{type}#{pipeline_param}"
  end

  # for elasticsearch 6.x and higher: one index per type
  pipeline_param = nil
  if pipeline == true
    pipeline_param = Setting.get('es_pipeline')
    if pipeline_param.present?
      pipeline_param = "?pipeline=#{pipeline_param}"
    end
  end

  return "#{url}/" if !type

  index = build_index_name(type)
  return "#{url}/#{index}" if with_type == false
  return "#{url}/#{index}/_doc/#{o_id}#{pipeline_param}" if o_id

  "#{url}/#{index}/_doc#{pipeline_param}"
end
# Returns the _search path fragment, depending on single/multi index mode.
#
# @param index [String, nil] model name
def self.build_search_url(index)
  # for elasticsearch 5.6 and lower
  if Setting.get('es_multi_index') == false
    return index ? "/#{index}/_search" : '/_search'
  end

  # for elasticsearch 6.x and higher
  "#{build_index_name(index)}/_doc/_search"
end
2018-03-08 15:30:07 +00:00
# Composes (and logs) a human readable error message for a failed ES request.
#
# @param verb     [String] HTTP verb used
# @param url      [String] requested URL
# @param payload  [Object, nil] request body, if any
# @param response [Object] the failed response
# @return [String] the composed error message
def self.humanized_error(verb:, url:, payload: nil, response:)
  prefix = "Unable to process #{verb} request to elasticsearch URL '#{url}'."
  suffix = "\n\nResponse:\n#{response.inspect}\n\nPayload:\n#{payload.inspect}"

  if payload.respond_to?(:to_json)
    suffix += "\n\nPayload size: #{payload.to_json.bytesize / 1024 / 1024}M"
  end

  if response&.error&.match?('Connection refused')
    message = "Elasticsearch is not reachable, probably because it's not running or even installed."
  elsif url.end_with?('pipeline/zammad-attachment', 'pipeline=zammad-attachment') && response.code == 400
    message = 'The installed attachment plugin could not handle the request payload. Ensure that the correct attachment plugin is installed (5.6 => ingest-attachment, 2.4 - 5.5 => mapper-attachments).'
  else
    message = 'Check the response and payload for detailed information: '
  end

  result = "#{prefix} #{message}#{suffix}"
  # cap the log line; the full message is still returned to the caller
  Rails.logger.error result.first(40_000)
  result
end
2018-07-03 07:18:55 +00:00
2018-10-02 11:50:50 +00:00
# add * on simple query like "somephrase23"
#
# Works on a copy now: the previous `query.strip!` mutated the string
# passed in by the caller.
#
# @param query [String] raw search input
# @return [String] stripped query, with '*' appended unless it contains ':'
def self.append_wildcard_to_simple_query(query)
  query = query.strip
  query += '*' if !query.match?(/:/)
  query
end
2018-11-06 16:11:10 +00:00
=begin

@param condition [Hash] search condition
@param options [Hash] search options
@option options [Integer] :from
@option options [Integer] :limit
@option options [Hash] :query_extension applied to ElasticSearch query
@option options [Array<String>] :order_by ordering directions, desc or asc
@option options [Array<String>] :sort_by fields to sort by

=end

DEFAULT_QUERY_OPTIONS = {
  from:  0,
  limit: 10
}.freeze

# Assembles the base ES query body: paging, sorting, a bool/must wrapper
# around the given condition, and an optional :query_extension deep-merged in.
def self.build_query(condition, options = {})
  options = DEFAULT_QUERY_OPTIONS.merge(options.deep_symbolize_keys)

  data = {
    from:  options[:from],
    size:  options[:limit],
    sort:  search_by_index_sort(options[:sort_by], options[:order_by]),
    query: {
      bool: {
        must: []
      }
    }
  }

  extension = options[:query_extension]
  data[:query].deep_merge!(extension.deep_dup) if extension

  data[:query][:bool][:must].push(condition)
  data
end
2019-06-20 10:45:27 +00:00
2015-04-27 14:15:29 +00:00
end