# (1) 04-log-to-logstash.yaml
---
# Input plugin configuration.
filebeat.inputs:
  # Input type is "log".
  - type: log
    # Paths of the log files to harvest.
    paths:
      - /tmp/filebeat/**/*.log
    # Whether this input is enabled.
    enabled: true
    # Tag each event.
    tags: ["json"]
    # Add custom fields to each event.
    fields:
      school: baimei
      class: dashu
    # Place the custom fields at the top level of the event.
    # If not set (default false), they are nested under the "fields" key.
    fields_under_root: true
  - type: log
    paths:
      - /tmp/filebeat/**/*.txt
    enabled: false

# Ship the events to Logstash.
output.logstash:
  hosts: ["10.0.0.102:8888"]
# (2) 05-log_processors-to-logstash.yaml
---
filebeat.inputs:
  - type: log
    paths: ["/tmp/data/apps/*.bulk"]
    processors:
      # Add fields to the event.
      - add_fields:
          # "target" controls where the added fields are placed.
          # If unset, the default is the "fields" key; an empty string ""
          # places them at the top level of the event.
          # target: baimei-dashu
          target: ""
          fields:
            name: wangshuo
            hobby: "抽烟,喝酒,烫头"
      # Drop events whose "message" field contains the string "create".
      - drop_event:
          when:
            contains:
              message: "create"
      # Decode JSON-formatted data in the "message" field.
      - decode_json_fields:
          fields: ["message"]
          target: ""
      # Drop specific fields. Filebeat built-in fields cannot be removed
      # here; if that is really needed, remove them in Logstash instead.
      # - drop_fields:
      #     # fields: ["@version","agent","host","input","ecs","log","tags"]
      #     fields: ["name","hobby"]
      #     ignore_missing: false
      # Rename fields.
      - rename:
          fields:
            - from: "name"
              to: "name2023"

output.logstash:
  hosts: ["10.0.0.102:8888"]
# (3) 06-log-to-es.yaml
---
filebeat.inputs:
  - type: log
    paths: ["/tmp/apps/*.bulk"]
    processors:
      # Decode JSON-formatted data in the "message" field into
      # top-level event fields (target: "").
      - decode_json_fields:
          fields: ["message"]
          target: ""

# Ship the events directly to the Elasticsearch cluster.
output.elasticsearch:
  # hosts: ["http://10.0.0.101:19200","http://10.0.0.102:19200","http://10.0.0.103:19200"]
  hosts:
    - "http://10.0.0.101:19200"
    - "http://10.0.0.102:19200"
    - "http://10.0.0.103:19200"
# 欢迎来撩: 汇总all