elasticsearch - Logstash: wrong order of fields in the mapping


I have a Logstash config like this:

input {
  beats {
    port => '5044'
  }
}
filter {
  grok {
    patterns_dir => ['./patterns']
    match => { 'message' => '%{TIME:timestamp}(","Event\s)(?<Event>([^"]+))(","Form\s)?(?<Form>([^"]+))?(","ParentType\s)?(?<parent_type>([^"]+))?(","ParentName\s)?(?<parent_name>([^"]+))?(","Type\s)?(?<type>([^"]+))?(","Name\s)?(?<Name_of_form>([^"]+))?(","Detail\s)?(?<Detail>([^"]+))?(","t=)?(?<t>([\d]+))?(",")?(?<Status>(end|beg))?' }
    add_tag => [ '%{Status}' ]
  }
  dissect {
    mapping => {
      '[log][file][path]' => 'C:\Logs\%{login}\%{server}\%{user}\%{filename}.txt'
    }
  }
  date {
    match => [ 'timestamp', 'dd.MM.yyyy HH:mm:ss' ]
  }
  mutate {
    add_field => { 'uniqueEvent' => '%{Event}_%{filename}' }
  }
  elapsed {
    unique_id_field => 'uniqueEvent'
    start_tag => 'beg'
    end_tag => 'end'
    new_event_on_match => false
  }

  if 'elapsed' in [tags] {
    aggregate {
      task_id => '%{uniqueEvent}'
      code => 'map["duration"] = [(event.get("elapsed_time")*1000).to_i]'
      map_action => 'create'
    }
  }
  mutate {
    remove_field => ['timestamp', 'ecs', 'log', 'tags', 'message', '@version', 'filename', 'input', 'host', 'agent', 't', 'parent_type', 'parent_name', 'type']
    rename => {'elapsed_time' => 'Event_duration'}
  }
}
output {
  elasticsearch {
    hosts => ['localhost:9200']
    index => 'logs-%{+dd.MM.YYYY}'
  }
}
In Elasticsearch, the fields I need always show up in a different order, which is rather maddening. I only have a few text fields, two date fields, and one numeric field. Elasticsearch itself detects the types perfectly well, but I want all the data to be laid out in a strict, fixed order in every index. Can this be done?
The mapping should follow this strict order:
"mappings": {
"properties": {
"@timestamp": {
"type": "date"
},
"Event": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"Form": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"Name_of_form": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"Detail": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"Event_duration": {
"type": "float"
},
"elapsed_timestamp_start": {
"type": "date"
},
"user": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"login": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"server": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"Status": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"uniqueEvent": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
}
}
}
I tried to achieve this following the first and only guide I could find on this topic, but nothing worked. Based on other people's articles, I created a template.json in the Logstash directory, but after starting Logstash the template did not take effect, and the error was of the kind: the template is not in Elasticsearch.
How do I create this file correctly? How do I wire it into the output block correctly? Will any of this work at all?
Thanks for your help!
The error is still the following:
[2020-08-25T19:33:13,365][INFO ][logstash.outputs.elasticsearch][main] Using mapping template from {:path=>"C:\\Users\\moral\\elk_stack\\logstash-7.8.1\\template\\template.json"}
[2020-08-25T19:33:13,406][INFO ][logstash.outputs.elasticsearch][main] Attempting to install template {:manage_template=>{"template"=>"logs-*", "version"=>50001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"properties"=>{"@timestamp"=>{"type"=>"date"}, "Event"=>{"type"=>"text", "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}, "Form"=>{"type"=>"text", "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}, "Name_of_form"=>{"type"=>"text", "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}, "Detail"=>{"type"=>"text", "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}, "Event_duration"=>{"type"=>"float"}, "elapsed_timestamp_start"=>{"type"=>"date"}, "user"=>{"type"=>"text", "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}, "login"=>{"type"=>"text", "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}, "server"=>{"type"=>"text", "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}, "Status"=>{"type"=>"text", "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}, "uniqueEvent"=>{"type"=>"text", "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}}}}
[2020-08-25T19:33:13,421][INFO ][logstash.outputs.elasticsearch][main] Installing elasticsearch template to _template/log-template
[2020-08-25T19:33:13,468][ERROR][logstash.outputs.elasticsearch][main] Failed to install template. {:message=>"Got response code '400' contacting Elasticsearch at URL 'http://localhost:9200/_template/log-template'", :class=>"LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError", :backtrace=>["C:/Users/moral/elk_stack/logstash-7.8.1/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.5.1-java/lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb:80:in `perform_request'", "C:/Users/moral/elk_stack/logstash-7.8.1/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.5.1-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:332:in `perform_request_to_url'", "C:/Users/moral/elk_stack/logstash-7.8.1/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.5.1-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:319:in `block in perform_request'", "C:/Users/moral/elk_stack/logstash-7.8.1/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.5.1-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:414:in `with_connection'", "C:/Users/moral/elk_stack/logstash-7.8.1/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.5.1-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:318:in `perform_request'", "C:/Users/moral/elk_stack/logstash-7.8.1/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.5.1-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:326:in `block in Pool'", "C:/Users/moral/elk_stack/logstash-7.8.1/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.5.1-java/lib/logstash/outputs/elasticsearch/http_client.rb:352:in `template_put'", "C:/Users/moral/elk_stack/logstash-7.8.1/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.5.1-java/lib/logstash/outputs/elasticsearch/http_client.rb:86:in `template_install'", "C:/Users/moral/elk_stack/logstash-7.8.1/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.5.1-java/lib/logstash/outputs/elasticsearch/template_manager.rb:28:in `install'", "C:/Users/moral/elk_stack/logstash-7.8.1/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.5.1-java/lib/logstash/outputs/elasticsearch/template_manager.rb:16:in `install_template'", "C:/Users/moral/elk_stack/logstash-7.8.1/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.5.1-java/lib/logstash/outputs/elasticsearch/common.rb:205:in `install_template'", "C:/Users/moral/elk_stack/logstash-7.8.1/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.5.1-java/lib/logstash/outputs/elasticsearch/common.rb:49:in `block in setup_after_successful_connection'"]}
The template.json file:
{
  "template" : "logs-*",
  "version" : 50001,
  "settings" : {
    "index.refresh_interval" : "5s"
  },
  "mappings" : {
    "_default_" : {
      "properties": {
        "@timestamp": {
          "type": "date"
        },
        "Event": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        },
        "Form": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        },
        "Name_of_form": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        },
        "Detail": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        },
        "Event_duration": {
          "type": "float"
        },
        "elapsed_timestamp_start": {
          "type": "date"
        },
        "user": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        },
        "login": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        },
        "server": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        },
        "Status": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        },
        "uniqueEvent": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        }
      }
    }
  }
}

Best Answer

All you need to do is tell Logstash to install the template when it starts up. Just make sure that the index_patterns in your template matches your index name (i.e. logs-*):

output {
  elasticsearch {
    hosts => ['localhost:9200']
    index => 'logs-%{+dd.MM.YYYY}'

    manage_template => true
    template_overwrite => true
    template_name => "log-template"
    template => "/path/to/template.json"
  }
}
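In addition, the 400 response in the question's log is most likely caused by the legacy "_default_" mapping type in template.json, which was removed in Elasticsearch 7.0, and possibly by the old "template" key, which was replaced by "index_patterns". Below is a minimal sketch of what a 7.x-compatible template file could look like under that assumption; only the outer structure changes, the properties block keeps the same fields as above:

{
  "index_patterns": ["logs-*"],
  "version": 50001,
  "settings": {
    "index.refresh_interval": "5s"
  },
  "mappings": {
    "properties": {
      "@timestamp":              { "type": "date" },
      "Event":                   { "type": "text", "fields": { "keyword": { "type": "keyword", "ignore_above": 256 } } },
      "Form":                    { "type": "text", "fields": { "keyword": { "type": "keyword", "ignore_above": 256 } } },
      "Name_of_form":            { "type": "text", "fields": { "keyword": { "type": "keyword", "ignore_above": 256 } } },
      "Detail":                  { "type": "text", "fields": { "keyword": { "type": "keyword", "ignore_above": 256 } } },
      "Event_duration":          { "type": "float" },
      "elapsed_timestamp_start": { "type": "date" },
      "user":                    { "type": "text", "fields": { "keyword": { "type": "keyword", "ignore_above": 256 } } },
      "login":                   { "type": "text", "fields": { "keyword": { "type": "keyword", "ignore_above": 256 } } },
      "server":                  { "type": "text", "fields": { "keyword": { "type": "keyword", "ignore_above": 256 } } },
      "Status":                  { "type": "text", "fields": { "keyword": { "type": "keyword", "ignore_above": 256 } } },
      "uniqueEvent":             { "type": "text", "fields": { "keyword": { "type": "keyword", "ignore_above": 256 } } }
    }
  }
}

After restarting Logstash, you can check whether the template was installed with GET _template/log-template (for example from the Kibana Dev Tools console, or with curl http://localhost:9200/_template/log-template).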

Regarding "elasticsearch - Logstash: wrong order of fields in the mapping", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/63578476/
