Hi all,
I am receiving the following logstash errors after installing X-Pack in logstash:
[2018-06-14T13:24:40,458][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"wazuh-alerts-3.x-2018.06.14", :_type=>"wazuh", :_routing=>nil}, #<LogStash::Event:0x31b18a15>], :response=>{"index"=>{"_index"=>"wazuh-alerts-3.x-2018.06.14", "_type"=>"wazuh", "_id"=>"kZ54_mMB86eT4RWzM1CD", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse [host]", "caused_by"=>{"type"=>"illegal_state_exception", "reason"=>"Can't get text on a START_OBJECT at 1:114"}}}}}
[2018-06-14T13:24:40,462][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"wazuh-alerts-3.x-2018.06.14", :_type=>"wazuh", :_routing=>nil}, #<LogStash::Event:0x73131863>], :response=>{"index"=>{"_index"=>"wazuh-alerts-3.x-2018.06.14", "_type"=>"wazuh", "_id"=>"kp54_mMB86eT4RWzM1CG", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse [host]", "caused_by"=>{"type"=>"illegal_state_exception", "reason"=>"Can't get text on a START_OBJECT at 1:114"}}}}}
[2018-06-14T13:24:46,303][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"wazuh-alerts-3.x-2018.06.14", :_type=>"wazuh", :_routing=>nil}, #<LogStash::Event:0x6f1b1dab>], :response=>{"index"=>{"_index"=>"wazuh-alerts-3.x-2018.06.14", "_type"=>"wazuh", "_id"=>"lZ54_mMB86eT4RWzSlBY", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse [host]", "caused_by"=>{"type"=>"illegal_state_exception", "reason"=>"Can't get text on a START_OBJECT at 1:114"}}}}}
[2018-06-14T13:24:46,304][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"wazuh-alerts-3.x-2018.06.14", :_type=>"wazuh", :_routing=>nil}, #<LogStash::Event:0x39da1d6b>], :response=>{"index"=>{"_index"=>"wazuh-alerts-3.x-2018.06.14", "_type"=>"wazuh", "_id"=>"lp54_mMB86eT4RWzSlBY", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse [host]", "caused_by"=>{"type"=>"illegal_state_exception", "reason"=>"Can't get text on a START_OBJECT at 1:114"}}}}}
My actual logstash filter and output is:
input {
beats {
port => 5000
codec => "json_lines"
}
}
filter {
if [data][srcip] {
mutate {
add_field => [ "@src_ip", "%{[data][srcip]}" ]
}
}
if [data][aws][sourceIPAddress] {
mutate {
add_field => [ "@src_ip", "%{[data][aws][sourceIPAddress]}" ]
}
}
}
filter {
geoip {
source => "@src_ip"
target => "GeoLocation"
fields => ["city_name", "country_name", "region_name", "location"]
}
date {
match => ["timestamp", "ISO8601"]
target => "@timestamp"
}
mutate {
remove_field => [ "timestamp", "beat", "input_type", "tags", "count", "@version", "log", "offset", "type","@src_ip"]
}
}
output {
elasticsearch {
hosts => ["
127.0.0.1:9200"]
index => "wazuh-alerts-3.x-%{+YYYY.MM.dd}"
document_type => "wazuh"
user => "logger"
password => "mypass"
}
}
Any idea?