Added support for elasticsearch 6 and 7. Fixes #1688.

This commit is contained in:
Martin Edenhofer 2019-06-20 12:45:27 +02:00
parent 9949dd18dc
commit af9fcf4565
11 changed files with 310 additions and 155 deletions

View file

@ -214,16 +214,9 @@ test:integration:clearbit:
### Elasticsearch ### Elasticsearch
test:integration:es: .script_integration_es_template: &script_integration_es_definition
<<: *base_env <<: *base_env
stage: test stage: test
services:
- name: registry.znuny.com/docker/zammad-mysql:latest
alias: mysql
- name: registry.znuny.com/docker/zammad-postgresql:latest
alias: postgresql
- name: registry.znuny.com/docker/zammad-elasticsearch:5.6
alias: elasticsearch
variables: variables:
RAILS_ENV: "test" RAILS_ENV: "test"
ES_INDEX_RAND: "true" ES_INDEX_RAND: "true"
@ -235,6 +228,36 @@ test:integration:es:
- bundle exec rspec --tag searchindex - bundle exec rspec --tag searchindex
- bundle exec rails test test/integration/report_test.rb - bundle exec rails test test/integration/report_test.rb
test:integration:es:5.6:
<<: *script_integration_es_definition
services:
- name: registry.znuny.com/docker/zammad-mysql:latest
alias: mysql
- name: registry.znuny.com/docker/zammad-postgresql:latest
alias: postgresql
- name: registry.znuny.com/docker/zammad-elasticsearch:5.6
alias: elasticsearch
test:integration:es:6:
<<: *script_integration_es_definition
services:
- name: registry.znuny.com/docker/zammad-mysql:latest
alias: mysql
- name: registry.znuny.com/docker/zammad-postgresql:latest
alias: postgresql
- name: registry.znuny.com/docker/zammad-elasticsearch:6
alias: elasticsearch
test:integration:es:7:
<<: *script_integration_es_definition
services:
- name: registry.znuny.com/docker/zammad-mysql:latest
alias: mysql
- name: registry.znuny.com/docker/zammad-postgresql:latest
alias: postgresql
- name: registry.znuny.com/docker/zammad-elasticsearch:7
alias: elasticsearch
### Zendesk ### Zendesk
test:integration:zendesk: test:integration:zendesk:
@ -320,7 +343,7 @@ browser:build:
alias: mysql alias: mysql
- name: registry.znuny.com/docker/zammad-postgresql:latest - name: registry.znuny.com/docker/zammad-postgresql:latest
alias: postgresql alias: postgresql
- name: registry.znuny.com/docker/zammad-elasticsearch:5.6 - name: registry.znuny.com/docker/zammad-elasticsearch:stable
alias: elasticsearch alias: elasticsearch
- name: docker.io/elgalu/selenium:3.14.0-p17 - name: docker.io/elgalu/selenium:3.14.0-p17
alias: selenium alias: selenium

View file

@ -197,12 +197,16 @@ class FormController < ApplicationController
def limit_reached? def limit_reached?
return false if !SearchIndexBackend.enabled? return false if !SearchIndexBackend.enabled?
# quote ipv6 ip
remote_ip = request.remote_ip.gsub(':', '\\:')
# in elasticsearch7 "created_at:>now-1h" is not working. Needed to catch -2h
form_limit_by_ip_per_hour = Setting.get('form_ticket_create_by_ip_per_hour') || 20 form_limit_by_ip_per_hour = Setting.get('form_ticket_create_by_ip_per_hour') || 20
result = SearchIndexBackend.search("preferences.form.remote_ip:'#{request.remote_ip}' AND created_at:>now-1h", 'Ticket', limit: form_limit_by_ip_per_hour) result = SearchIndexBackend.search("preferences.form.remote_ip:'#{remote_ip}' AND created_at:>now-2h", 'Ticket', limit: form_limit_by_ip_per_hour)
raise Exceptions::NotAuthorized if result.count >= form_limit_by_ip_per_hour.to_i raise Exceptions::NotAuthorized if result.count >= form_limit_by_ip_per_hour.to_i
form_limit_by_ip_per_day = Setting.get('form_ticket_create_by_ip_per_day') || 240 form_limit_by_ip_per_day = Setting.get('form_ticket_create_by_ip_per_day') || 240
result = SearchIndexBackend.search("preferences.form.remote_ip:'#{request.remote_ip}' AND created_at:>now-1d", 'Ticket', limit: form_limit_by_ip_per_day) result = SearchIndexBackend.search("preferences.form.remote_ip:'#{remote_ip}' AND created_at:>now-1d", 'Ticket', limit: form_limit_by_ip_per_day)
raise Exceptions::NotAuthorized if result.count >= form_limit_by_ip_per_day.to_i raise Exceptions::NotAuthorized if result.count >= form_limit_by_ip_per_day.to_i
form_limit_per_day = Setting.get('form_ticket_create_per_day') || 5000 form_limit_per_day = Setting.get('form_ticket_create_per_day') || 5000

View file

@ -39,8 +39,12 @@ returns
end end
if sort_by.blank? if sort_by.blank?
if default.is_a?(Array)
sort_by = default
else
sort_by.push(default) sort_by.push(default)
end end
end
sort_by sort_by
end end
@ -77,8 +81,12 @@ returns
end end
if order_by.blank? if order_by.blank?
if default.is_a?(Array)
order_by = default
else
order_by.push(default) order_by.push(default)
end end
end
order_by order_by
end end

View file

@ -72,11 +72,11 @@ returns
offset = params[:offset] || 0 offset = params[:offset] || 0
current_user = params[:current_user] current_user = params[:current_user]
# check sort # check sort - positions related to order by
sort_by = search_get_sort_by(params, 'updated_at') sort_by = search_get_sort_by(params, %w[active updated_at])
# check order # check order - positions related to sort by
order_by = search_get_order_by(params, 'desc') order_by = search_get_order_by(params, %w[desc desc])
# enable search only for agents and admins # enable search only for agents and admins
return [] if !search_preferences(current_user) return [] if !search_preferences(current_user)

View file

@ -83,11 +83,11 @@ returns
offset = params[:offset] || 0 offset = params[:offset] || 0
current_user = params[:current_user] current_user = params[:current_user]
# check sort # check sort - positions related to order by
sort_by = search_get_sort_by(params, 'updated_at') sort_by = search_get_sort_by(params, %w[active updated_at])
# check order # check order - positions related to sort by
order_by = search_get_order_by(params, 'desc') order_by = search_get_order_by(params, %w[desc desc])
# enable search only for agents and admins # enable search only for agents and admins
return [] if !search_preferences(current_user) return [] if !search_preferences(current_user)

View file

@ -0,0 +1,18 @@
class SettingEsMultiIndex < ActiveRecord::Migration[5.1]
def up
# return if it's a new setup
return if !Setting.find_by(name: 'system_init_done')
Setting.create_if_not_exists(
title: 'Elasticsearch Multi Index',
name: 'es_multi_index',
area: 'SearchIndex::Elasticsearch',
description: 'Define if Elasticsearch is using multiple indexes.',
state: false,
preferences: { online_service_disable: true },
frontend: false
)
end
end

View file

@ -2849,6 +2849,15 @@ Setting.create_if_not_exists(
preferences: { online_service_disable: true }, preferences: { online_service_disable: true },
frontend: false frontend: false
) )
Setting.create_if_not_exists(
title: 'Elasticsearch Multi Index',
name: 'es_multi_index',
area: 'SearchIndex::Elasticsearch',
description: 'Define if Elasticsearch is using multiple indexes.',
state: false,
preferences: { online_service_disable: true },
frontend: false
)
Setting.create_if_not_exists( Setting.create_if_not_exists(
title: 'Import Mode', title: 'Import Mode',

View file

@ -45,7 +45,7 @@ returns
end end
selector.merge!(without_merged_tickets) # do not show merged tickets in reports selector.merge!(without_merged_tickets) # do not show merged tickets in reports
result_es = SearchIndexBackend.selectors(['Ticket'], selector, {}, aggs_interval) result_es = SearchIndexBackend.selectors('Ticket', selector, {}, aggs_interval)
if params[:interval] == 'month' if params[:interval] == 'month'
stop_interval = 12 stop_interval = 12
elsif params[:interval] == 'week' elsif params[:interval] == 'week'
@ -166,7 +166,7 @@ returns
end end
selector.merge!(without_merged_tickets) # do not show merged tickets in reports selector.merge!(without_merged_tickets) # do not show merged tickets in reports
result = SearchIndexBackend.selectors(['Ticket'], selector, { limit: limit }, aggs_interval) result = SearchIndexBackend.selectors('Ticket', selector, { limit: limit }, aggs_interval)
return result if params[:sheet].present? return result if params[:sheet].present?
assets = {} assets = {}

View file

@ -31,8 +31,11 @@ info about used search index machine
installed_version = response.data.dig('version', 'number') installed_version = response.data.dig('version', 'number')
raise "Unable to get elasticsearch version from response: #{response.inspect}" if installed_version.blank? raise "Unable to get elasticsearch version from response: #{response.inspect}" if installed_version.blank?
version_supported = Gem::Version.new(installed_version) < Gem::Version.new('5.7') version_supported = Gem::Version.new(installed_version) < Gem::Version.new('8')
raise "Version #{installed_version} of configured elasticsearch is not supported" if !version_supported raise "Version #{installed_version} of configured elasticsearch is not supported." if !version_supported
version_supported = Gem::Version.new(installed_version) > Gem::Version.new('2.3')
raise "Version #{installed_version} of configured elasticsearch is not supported." if !version_supported
return response.data return response.data
end end
@ -130,6 +133,7 @@ create/update/delete index
SearchIndexBackend.index( SearchIndexBackend.index(
:action => 'create', # create/update/delete :action => 'create', # create/update/delete
:name => 'Ticket',
:data => { :data => {
:mappings => { :mappings => {
:Ticket => { :Ticket => {
@ -148,17 +152,14 @@ create/update/delete index
SearchIndexBackend.index( SearchIndexBackend.index(
:action => 'delete', # create/update/delete :action => 'delete', # create/update/delete
:name => 'Ticket', # optional :name => 'Ticket',
) )
SearchIndexBackend.index(
:action => 'delete', # create/update/delete
)
=end =end
def self.index(data) def self.index(data)
url = build_url(data[:name]) url = build_url(data[:name], nil, false, false)
return if url.blank? return if url.blank?
if data[:action] && data[:action] == 'delete' if data[:action] && data[:action] == 'delete'
@ -245,7 +246,7 @@ remove whole data from index
=end =end
def self.remove(type, o_id = nil) def self.remove(type, o_id = nil)
url = build_url(type, o_id) url = build_url(type, o_id, false, false)
return if url.blank? return if url.blank?
Rails.logger.info "# curl -X DELETE \"#{url}\"" Rails.logger.info "# curl -X DELETE \"#{url}\""
@ -275,7 +276,7 @@ remove whole data from index
=begin =begin
@param query [String] search query @param query [String] search query
@param index [String, Array<String>, nil] indexes to search in (see search_by_index) @param index [String, Array<String>] indexes to search in (see search_by_index)
@param options [Hash] search options (see build_query) @param options [Hash] search options (see build_query)
@return search result @return search result
@ -305,7 +306,7 @@ remove whole data from index
=end =end
def self.search(query, index = nil, options = {}) def self.search(query, index, options = {})
if !index.is_a? Array if !index.is_a? Array
return search_by_index(query, index, options) return search_by_index(query, index, options)
end end
@ -319,28 +320,20 @@ remove whole data from index
=begin =begin
@param query [String] search query @param query [String] search query
@param index [String, Array<String>, nil] index name or list of index names. If index is nil or not present will, search will be performed globally @param index [String] index name
@param options [Hash] search options (see build_query) @param options [Hash] search options (see build_query)
@return search result @return search result
=end =end
def self.search_by_index(query, index = nil, options = {}) def self.search_by_index(query, index, options = {})
return [] if query.blank? return [] if query.blank?
url = build_url url = build_url
return if url.blank? return if url.blank?
url += if index url += build_search_url(index)
if index.is_a?(Array)
"/#{index.join(',')}/_search"
else
"/#{index}/_search"
end
else
'/_search'
end
# real search condition # real search condition
condition = { condition = {
@ -396,8 +389,8 @@ remove whole data from index
Rails.logger.info "... #{item['_type']} #{item['_id']}" Rails.logger.info "... #{item['_type']} #{item['_id']}"
output = { output = {
id: item['_id'].to_i, id: item['_id'],
type: item['_type'], type: index,
} }
if options.dig(:highlight_fields_by_indexes, index.to_sym) if options.dig(:highlight_fields_by_indexes, index.to_sym)
@ -434,15 +427,6 @@ remove whole data from index
) )
end end
# add sorting by active if active is not part of the query
if result.flat_map(&:keys).exclude?(:active)
result.unshift(
active: {
order: 'desc',
},
)
end
result.push('_score') result.push('_score')
result result
@ -456,7 +440,7 @@ get count of tickets and tickets which match on selector
example with a simple search: example with a simple search:
result = SearchIndexBackend.selectors('Ticket', { category: { operator: 'is', value: 'aa::ab' } }) result = SearchIndexBackend.selectors('Ticket', { 'category' => { 'operator' => 'is', 'value' => 'aa::ab' } })
result = [ result = [
{ id: 1, type: 'Ticket' }, { id: 1, type: 'Ticket' },
@ -482,7 +466,7 @@ example for aggregations within one year
current_user: User.find(123), current_user: User.find(123),
} }
result = SearchIndexBackend.selectors('Ticket', { category: { operator: 'is', value: 'aa::ab' } }, options, aggs_interval) result = SearchIndexBackend.selectors('Ticket', { 'category' => { 'operator' => 'is', 'value' => 'aa::ab' } }, options, aggs_interval)
result = { result = {
hits:{ hits:{
@ -509,21 +493,13 @@ example for aggregations within one year
=end =end
def self.selectors(index = nil, selectors = nil, options = {}, aggs_interval = nil) def self.selectors(index, selectors = nil, options = {}, aggs_interval = nil)
raise 'no selectors given' if !selectors raise 'no selectors given' if !selectors
url = build_url url = build_url(nil, nil, false, false)
return if url.blank? return if url.blank?
url += if index url += build_search_url(index)
if index.is_a?(Array)
"/#{index.join(',')}/_search"
else
"/#{index}/_search"
end
else
'/_search'
end
data = selector2query(selectors, options, aggs_interval) data = selector2query(selectors, options, aggs_interval)
@ -669,9 +645,9 @@ example for aggregations within one year
t[:range] = {} t[:range] = {}
t[:range][key_tmp] = {} t[:range][key_tmp] = {}
if data['operator'] == 'before (absolute)' if data['operator'] == 'before (absolute)'
t[:range][key_tmp][:lt] = (data['value']).to_s t[:range][key_tmp][:lt] = (data['value'])
else else
t[:range][key_tmp][:gt] = (data['value']).to_s t[:range][key_tmp][:gt] = (data['value'])
end end
query_must.push t query_must.push t
else else
@ -745,10 +721,18 @@ return true if backend is configured
true true
end end
def self.build_url(type = nil, o_id = nil) def self.build_index_name(index)
local_index = "#{Setting.get('es_index')}_#{Rails.env}"
"#{local_index}_#{index.underscore.tr('/', '_')}"
end
def self.build_url(type = nil, o_id = nil, pipeline = true, with_type = true)
return if !SearchIndexBackend.enabled? return if !SearchIndexBackend.enabled?
# for elasticsearch 5.6 and lower
index = "#{Setting.get('es_index')}_#{Rails.env}" index = "#{Setting.get('es_index')}_#{Rails.env}"
if Setting.get('es_multi_index') == false
url = Setting.get('es_url') url = Setting.get('es_url')
url = if type url = if type
url_pipline = Setting.get('es_pipeline') url_pipline = Setting.get('es_pipeline')
@ -763,7 +747,45 @@ return true if backend is configured
else else
"#{url}/#{index}" "#{url}/#{index}"
end end
url return url
end
# for elasticsearch 6.x and higher
url = Setting.get('es_url')
if pipeline == true
url_pipline = Setting.get('es_pipeline')
if url_pipline.present?
url_pipline = "?pipeline=#{url_pipline}"
end
end
if type
index = build_index_name(type)
if with_type == false
return "#{url}/#{index}"
end
if o_id
return "#{url}/#{index}/_doc/#{o_id}#{url_pipline}"
end
return "#{url}/#{index}/_doc#{url_pipline}"
end
"#{url}/"
end
def self.build_search_url(index)
# for elasticsearch 5.6 and lower
if Setting.get('es_multi_index') == false
if index
return "/#{index}/_search"
end
return '/_search'
end
# for elasticsearch 6.x and higher
"#{build_index_name(index)}/_doc/_search"
end end
def self.humanized_error(verb:, url:, payload: nil, response:) def self.humanized_error(verb:, url:, payload: nil, response:)
@ -833,4 +855,5 @@ return true if backend is configured
data data
end end
end end

View file

@ -3,12 +3,21 @@ require 'rubygems'
namespace :searchindex do namespace :searchindex do
task :drop, [:opts] => :environment do |_t, _args| task :drop, [:opts] => :environment do |_t, _args|
print 'drop indexes...'
# drop indexes # drop indexes
print 'drop indexes...' if es_multi_index?
Models.indexable.each do |local_object|
SearchIndexBackend.index(
action: 'delete',
name: local_object.name,
)
end
else
SearchIndexBackend.index( SearchIndexBackend.index(
action: 'delete', action: 'delete',
) )
end
puts 'done' puts 'done'
Rake::Task['searchindex:drop_pipeline'].execute Rake::Task['searchindex:drop_pipeline'].execute
@ -17,22 +26,33 @@ namespace :searchindex do
task :create, [:opts] => :environment do |_t, _args| task :create, [:opts] => :environment do |_t, _args|
print 'create indexes...' print 'create indexes...'
# es with mapper-attachments plugin if es_multi_index?
info = SearchIndexBackend.info Setting.set('es_multi_index', true)
number = nil else
if info.present? Setting.set('es_multi_index', false)
number = info['version']['number'].to_s
end end
settings = { settings = {
'index.mapping.total_fields.limit': 2000, 'index.mapping.total_fields.limit': 2000,
} }
# create indexes
if es_multi_index?
Models.indexable.each do |local_object|
SearchIndexBackend.index(
action: 'create',
name: local_object.name,
data: {
mappings: get_mapping_properties_object(local_object),
settings: settings,
}
)
end
else
mapping = {} mapping = {}
Models.indexable.each do |local_object| Models.indexable.each do |local_object|
mapping.merge!(get_mapping_properties_object(local_object)) mapping.merge!(get_mapping_properties_object(local_object))
end end
# create indexes
SearchIndexBackend.index( SearchIndexBackend.index(
action: 'create', action: 'create',
data: { data: {
@ -40,9 +60,6 @@ namespace :searchindex do
settings: settings, settings: settings,
} }
) )
if number.blank? || number =~ /^[2-4]\./ || number =~ /^5\.[0-5]\./
Setting.set('es_pipeline', '')
end end
puts 'done' puts 'done'
@ -51,14 +68,10 @@ namespace :searchindex do
end end
task :create_pipeline, [:opts] => :environment do |_t, _args| task :create_pipeline, [:opts] => :environment do |_t, _args|
if !es_pipeline?
# es with mapper-attachments plugin Setting.set('es_pipeline', '')
info = SearchIndexBackend.info next
number = nil
if info.present?
number = info['version']['number'].to_s
end end
next if number.blank? || number =~ /^[2-4]\./ || number =~ /^5\.[0-5]\./
# update processors # update processors
pipeline = Setting.get('es_pipeline') pipeline = Setting.get('es_pipeline')
@ -103,14 +116,7 @@ namespace :searchindex do
end end
task :drop_pipeline, [:opts] => :environment do |_t, _args| task :drop_pipeline, [:opts] => :environment do |_t, _args|
next if !es_pipeline?
# es with mapper-attachments plugin
info = SearchIndexBackend.info
number = nil
if info.present?
number = info['version']['number'].to_s
end
next if number.blank? || number =~ /^[2-4]\./ || number =~ /^5\.[0-5]\./
# update processors # update processors
pipeline = Setting.get('es_pipeline') pipeline = Setting.get('es_pipeline')
@ -142,11 +148,9 @@ namespace :searchindex do
end end
task :rebuild, [:opts] => :environment do |_t, _args| task :rebuild, [:opts] => :environment do |_t, _args|
Rake::Task['searchindex:drop'].execute Rake::Task['searchindex:drop'].execute
Rake::Task['searchindex:create'].execute Rake::Task['searchindex:create'].execute
Rake::Task['searchindex:reload'].execute Rake::Task['searchindex:reload'].execute
end end
end end
@ -172,74 +176,87 @@ mapping = {
=end =end
def get_mapping_properties_object(object) def get_mapping_properties_object(object)
name = object.name
if es_multi_index?
name = '_doc'
end
result = { result = {
object.name => { name => {
properties: {} properties: {}
} }
} }
store_columns = %w[preferences data] store_columns = %w[preferences data]
# for elasticsearch 6.x and later
string_type = 'text'
string_raw = { 'type': 'keyword' }
boolean_raw = { 'type': 'boolean' }
# for elasticsearch 5.6 and lower
if !es_multi_index?
string_type = 'string'
string_raw = { 'type': 'string', 'index': 'not_analyzed' }
boolean_raw = { 'type': 'boolean', 'index': 'not_analyzed' }
end
object.columns_hash.each do |key, value| object.columns_hash.each do |key, value|
if value.type == :string && value.limit && value.limit <= 5000 && store_columns.exclude?(key) if value.type == :string && value.limit && value.limit <= 5000 && store_columns.exclude?(key)
result[object.name][:properties][key] = { result[name][:properties][key] = {
type: 'string', type: string_type,
fields: { fields: {
raw: { 'type': 'string', 'index': 'not_analyzed' } raw: string_raw,
} }
} }
elsif value.type == :integer elsif value.type == :integer
result[object.name][:properties][key] = { result[name][:properties][key] = {
type: 'integer', type: 'integer',
} }
elsif value.type == :datetime elsif value.type == :datetime
result[object.name][:properties][key] = { result[name][:properties][key] = {
type: 'date', type: 'date',
} }
elsif value.type == :boolean elsif value.type == :boolean
result[object.name][:properties][key] = { result[name][:properties][key] = {
type: 'boolean', type: 'boolean',
fields: { fields: {
raw: { 'type': 'boolean', 'index': 'not_analyzed' } raw: boolean_raw,
} }
} }
elsif value.type == :binary elsif value.type == :binary
result[object.name][:properties][key] = { result[name][:properties][key] = {
type: 'binary', type: 'binary',
} }
elsif value.type == :bigint elsif value.type == :bigint
result[object.name][:properties][key] = { result[name][:properties][key] = {
type: 'long', type: 'long',
} }
elsif value.type == :decimal elsif value.type == :decimal
result[object.name][:properties][key] = { result[name][:properties][key] = {
type: 'float', type: 'float',
} }
elsif value.type == :date elsif value.type == :date
result[object.name][:properties][key] = { result[name][:properties][key] = {
type: 'date', type: 'date',
} }
end end
end end
# es with mapper-attachments plugin # es with mapper-attachments plugin
info = SearchIndexBackend.info
number = nil
if info.present?
number = info['version']['number'].to_s
end
if object.name == 'Ticket' if object.name == 'Ticket'
result[object.name][:_source] = { # do not server attachments if document is requested
result[name][:_source] = {
excludes: ['article.attachment'] excludes: ['article.attachment']
} }
if number.blank? || number =~ /^[2-4]\./ || number =~ /^5\.[0-5]\./ # for elasticsearch 5.5 and lower
result[object.name][:_source] = { if !es_pipeline?
result[name][:_source] = {
excludes: ['article.attachment'] excludes: ['article.attachment']
} }
result[object.name][:properties][:article] = { result[name][:properties][:article] = {
type: 'nested', type: 'nested',
include_in_parent: true, include_in_parent: true,
properties: { properties: {
@ -251,5 +268,45 @@ def get_mapping_properties_object(object)
end end
end end
result return result if es_type_in_mapping?
result[name]
end
# get es version
def es_version
info = SearchIndexBackend.info
number = nil
if info.present?
number = info['version']['number'].to_s
end
number
end
# no es_pipeline for elasticsearch 5.5 and lower
def es_pipeline?
number = es_version
return false if number.blank?
return false if number =~ /^[2-4]\./
return false if number =~ /^5\.[0-5]\./
true
end
# no multi index for elasticsearch 5.6 and lower
def es_multi_index?
number = es_version
return false if number.blank?
return false if number =~ /^[2-5]\./
true
end
# no type in mapping
def es_type_in_mapping?
number = es_version
return true if number.blank?
return true if number =~ /^[2-6]\./
false
end end

View file

@ -151,18 +151,14 @@ RSpec.describe 'Form', type: :request, searchindex: true do
token = json_response['token'] token = json_response['token']
(1..20).each do |count| (1..20).each do |count|
travel 10.seconds
post '/api/v1/form_submit', params: { fingerprint: fingerprint, token: token, name: 'Bob Smith', email: 'discard@znuny.com', title: "test#{count}", body: 'hello' }, as: :json post '/api/v1/form_submit', params: { fingerprint: fingerprint, token: token, name: 'Bob Smith', email: 'discard@znuny.com', title: "test#{count}", body: 'hello' }, as: :json
expect(response).to have_http_status(:ok) expect(response).to have_http_status(:ok)
expect(json_response).to be_a_kind_of(Hash) expect(json_response).to be_a_kind_of(Hash)
expect(json_response['errors']).to be_falsey
expect(json_response['errors']).to be_falsey expect(json_response['errors']).to be_falsey
expect(json_response['ticket']).to be_truthy expect(json_response['ticket']).to be_truthy
expect(json_response['ticket']['id']).to be_truthy expect(json_response['ticket']['id']).to be_truthy
expect(json_response['ticket']['number']).to be_truthy
Scheduler.worker(true) Scheduler.worker(true)
sleep 1 # wait until elasticsearch has indexed
end end
sleep 10 # wait until elasticsearch is index sleep 10 # wait until elasticsearch is index
@ -175,7 +171,6 @@ RSpec.describe 'Form', type: :request, searchindex: true do
@headers = { 'ACCEPT' => 'application/json', 'CONTENT_TYPE' => 'application/json', 'REMOTE_ADDR' => '1.2.3.5' } @headers = { 'ACCEPT' => 'application/json', 'CONTENT_TYPE' => 'application/json', 'REMOTE_ADDR' => '1.2.3.5' }
(1..20).each do |count| (1..20).each do |count|
travel 10.seconds
post '/api/v1/form_submit', params: { fingerprint: fingerprint, token: token, name: 'Bob Smith', email: 'discard@znuny.com', title: "test-2-#{count}", body: 'hello' }, as: :json post '/api/v1/form_submit', params: { fingerprint: fingerprint, token: token, name: 'Bob Smith', email: 'discard@znuny.com', title: "test-2-#{count}", body: 'hello' }, as: :json
expect(response).to have_http_status(:ok) expect(response).to have_http_status(:ok)
expect(json_response).to be_a_kind_of(Hash) expect(json_response).to be_a_kind_of(Hash)
@ -183,9 +178,27 @@ RSpec.describe 'Form', type: :request, searchindex: true do
expect(json_response['errors']).to be_falsey expect(json_response['errors']).to be_falsey
expect(json_response['ticket']).to be_truthy expect(json_response['ticket']).to be_truthy
expect(json_response['ticket']['id']).to be_truthy expect(json_response['ticket']['id']).to be_truthy
expect(json_response['ticket']['number']).to be_truthy
Scheduler.worker(true) Scheduler.worker(true)
sleep 1 # wait until elasticsearch is index end
sleep 10 # wait until elasticsearch is index
post '/api/v1/form_submit', params: { fingerprint: fingerprint, token: token, name: 'Bob Smith', email: 'discard@znuny.com', title: 'test-2-last', body: 'hello' }, as: :json
expect(response).to have_http_status(:unauthorized)
expect(json_response).to be_a_kind_of(Hash)
expect(json_response['error']).to be_truthy
@headers = { 'ACCEPT' => 'application/json', 'CONTENT_TYPE' => 'application/json', 'REMOTE_ADDR' => '::1' }
(1..20).each do |count|
post '/api/v1/form_submit', params: { fingerprint: fingerprint, token: token, name: 'Bob Smith', email: 'discard@znuny.com', title: "test-2-#{count}", body: 'hello' }, as: :json
expect(response).to have_http_status(:ok)
expect(json_response).to be_a_kind_of(Hash)
expect(json_response['errors']).to be_falsey
expect(json_response['ticket']).to be_truthy
expect(json_response['ticket']['id']).to be_truthy
Scheduler.worker(true)
end end
sleep 10 # wait until elasticsearch is index sleep 10 # wait until elasticsearch is index