.
├── config
│   └── filebeat.docker.yml
├── filebeat
│   └── registry
│       └── filebeat
│           ├── data.json
│           └── meta.json
└── setup.sh
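
Filebeat's registry (per-file read offsets) is bind-mounted from the host, so redeploying the container does not re-ship old logs. A minimal sketch for preparing this layout; the registry files themselves are created by Filebeat on first run:

mkdir -p config filebeat/registry/filebeat
# drop filebeat.docker.yml into config/ and run setup.sh from this directory

setup.sh: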
#!/bin/bash
echo 'Asia/Shanghai' > /etc/timezone

docker stop filebeat
docker rm filebeat

docker run -d \
  --net host \
  -m 1024M \
  --restart=always \
  --name=filebeat \
  --hostname=$(hostname) \
  --user=root \
  --volume="$(pwd)/config/filebeat.docker.yml:/usr/share/filebeat/filebeat.yml:ro" \
  -v /etc/timezone:/etc/timezone:ro \
  -v /etc/localtime:/etc/localtime:ro \
  -v /data/logs/:/data/logs/ \
  -v "$(pwd)/filebeat/registry/:/usr/share/filebeat/data/registry/" \
  docker.elastic.co/beats/filebeat:7.0.0 filebeat -e -strict.perms=false

# To run the one-off dashboard/index setup against Elasticsearch/Kibana instead
# (not needed when shipping to Kafka), replace the command above with:
#   setup -E setup.kibana.host=kibana:5601 \
#     -E output.elasticsearch.hosts=["elasticsearch:9200"]
# Debugging:
#   docker exec -it filebeat /bin/bash
#   docker logs --tail=200 -f filebeat
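
Once the script has run, a quick sanity check that the container is up and actually harvesting (the "Harvester started" wording matches Filebeat 7.x logs; adjust the grep for other versions):

docker ps --filter name=filebeat
docker logs --tail=100 filebeat 2>&1 | grep -i "harvester started"
# the registry file records the per-file read offsets
head -c 500 filebeat/registry/filebeat/data.json

config/filebeat.docker.yml (presumably rendered by SaltStack, hence the {{ grains['id'] }} placeholders):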
filebeat.config:
  modules:
    path: ${path.config}/modules.d/*.yml
    reload.enabled: false

processors:
  - add_cloud_metadata: ~
filebeat.inputs:
  # nginx access log (written as JSON)
  - type: log
    paths:
      - /data/logs/nginx/json_*.log
    fields:
      type: "test_nginxlog"
      saltid: {{ grains['id'] }}
      appType: nginx
      env: test
      object: "未分类"   # "uncategorized"
    fields_under_root: true
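
  # Assumption (the nginx config is not shown in the post): the json_*.log files
  # come from a JSON log_format in nginx, along the lines of
  #   log_format json escape=json '{"time":"$time_iso8601","remote_addr":"$remote_addr",'
  #                               '"request":"$request","status":$status,"bytes":$body_bytes_sent}';
  #   access_log /data/logs/nginx/json_access.log json;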

  # MySQL slow query log
  - type: log
    enabled: true
    name: "mysql-slow-query-log"
    paths:
      - /data/app/mysql/log-3306/mysql-slow.log
    multiline:
      pattern: '^\# Time:'  # a line starting with "# Time:" begins a new event
      negate: true          # merge lines that do NOT match the pattern
      match: after          # append them to the end of the previous line
      max_lines: 500        # cap on lines merged into a single event
      timeout: 30s          # flush the event if no new lines arrive in time
    fields:
      type: "mysql_slow_log"
      saltid: {{ grains['id'] }}
      databaseid: "esxi-vm-mysql-006"
    fields_under_root: true
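
  # For reference (sample entry, not from the post): each slow-log event spans
  # several lines and only the first one starts with "# Time:", e.g.
  #   # Time: 2021-06-01T03:12:45.123456Z
  #   # User@Host: app[app] @ 10.0.0.8 []
  #   # Query_time: 2.5  Lock_time: 0.0  Rows_sent: 1  Rows_examined: 100000
  #   SELECT ...;
  # so negate + after folds every following line into the current event.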

  # PostgreSQL slow query log
  - type: log
    paths:
      - /data/app/postgresql/data-5432/pgdata/log/*.log
    fields:
      type: "pg_slow_log"
      saltid: {{ grains['id'] }}
      databaseid: "ops-jhj-pg"
    fields_under_root: true

output.kafka:
  # initial brokers for reading cluster metadata
  hosts: ["10.0.18.2:9092"]
  # message topic selection + partitioning
  topics:
    - topic: "test_nginxlog"
      when.equals:
        type: "test_nginxlog"
  partition.round_robin:
    reachable_only: false
  required_acks: 1
  compression: gzip
  max_message_bytes: 1000000
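
Note that only test_nginxlog has a routing rule above; the mysql_slow_log and pg_slow_log events need their own entries under topics: (or a fallback topic: setting) before they can be shipped anywhere. To confirm messages are actually reaching the broker, consume a few from the topic (the Kafka install path here is an assumption):

/opt/kafka/bin/kafka-console-consumer.sh \
  --bootstrap-server 10.0.18.2:9092 \
  --topic test_nginxlog \
  --from-beginning --max-messages 5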

On Windows, logs are also shipped to Kafka with Filebeat; they need a topic separate from the rsyslog one so that Vector can apply different processing logic to each.
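
A minimal sketch of what that split could look like on the Vector side; the rsyslog topic name, consumer group ids, parsing step, and sink are assumptions rather than anything from this setup:

cat > vector.yaml <<'EOF'
sources:
  nginx_from_filebeat:
    type: kafka
    bootstrap_servers: "10.0.18.2:9092"
    group_id: vector-filebeat
    topics: ["test_nginxlog"]
  windows_from_rsyslog:            # hypothetical topic fed by rsyslog
    type: kafka
    bootstrap_servers: "10.0.18.2:9092"
    group_id: vector-rsyslog
    topics: ["rsyslog_windows"]
transforms:
  parse_filebeat_event:            # the Kafka payload is the Filebeat event as JSON
    type: remap
    inputs: ["nginx_from_filebeat"]
    source: |
      . = parse_json!(string!(.message))
sinks:
  debug_out:                       # replace with the real downstream sink
    type: console
    inputs: ["parse_filebeat_event", "windows_from_rsyslog"]
    encoding:
      codec: json
EOF
vector validate vector.yaml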