# Filebeat
# Official Website
# Common Commands
# Test run: start Filebeat in the foreground and watch the publish debug output

```bash
./filebeat -e -c filebeat.yml -d "publish"
```
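The configuration and output connectivity can also be checked before a real run; recent Filebeat versions ship `test` subcommands for this (connectivity checking for `test output` depends on the output type):

```bash
# Validate the syntax of filebeat.yml
./filebeat test config -c filebeat.yml
# Check that the configured output is reachable
./filebeat test output -c filebeat.yml
```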
# Start Filebeat in the background

```bash
nohup ./filebeat -e -c filebeat.yml >/dev/null 2>&1 &
```
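If the startup output should be kept for troubleshooting rather than discarded, redirect it to a file instead of /dev/null (the file name here is only an example):

```bash
nohup ./filebeat -e -c filebeat.yml > filebeat.out 2>&1 &
tail -f filebeat.out
```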
# Stop Filebeat

```bash
ps -ef | grep filebeat
kill -9 <PID>
```
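`kill -9` terminates the process immediately; sending a normal SIGTERM instead lets Filebeat flush in-flight events and save its registry before exiting, for example:

```bash
# Graceful stop: SIGTERM (assumes a single filebeat process on the host)
pkill -f filebeat
```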
# filebeat.yml Configuration Notes
```yaml
filebeat.inputs:
- type: log
  enabled: true
  paths:
    - /data/service/**/logs/*.log
    #- c:\programdata\elasticsearch\logs\*
  # Only collect lines containing this keyword (here the marker '操作日志开始', "operation log start")
  include_lines: ['操作日志开始']

#================================ Outputs =====================================
# Configure what output to use when sending the data collected by the beat.
# Note: Filebeat allows only one output to be enabled at a time; enable exactly
# one of the Elasticsearch, Kafka, or Redis sections below.

#-------------------------- Elasticsearch output ------------------------------
#output.elasticsearch:
  # Array of hosts to connect to.
  #hosts: ["172.210.1.181:9200"]
  # Optional protocol and basic auth credentials.
  #protocol: "https"
  #username: "elastic"
  #password: "qwerasdf"
  #index: "nginx_%{+yyyyMMdd}"
#setup.template.name: "nginx"
#setup.template.pattern: "nginx_*"
#setup.ilm.enabled: false

#------------------------------ Kafka output ----------------------------------
output.kafka:
  hosts: ["172.30.196.235:9092"]
  topic: 'JarLogs'
  partition.round_robin:
    reachable_only: false
  required_acks: 1
  compression: gzip
  max_message_bytes: 1000000

#------------------------------ Redis output ----------------------------------
#output.redis:
#  hosts: ["***"]
#  password: "***"
#  key: "filebeat"
#  db: 9
#  timeout: 5

processors:
  - add_host_metadata: ~
  - add_cloud_metadata: ~
```
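With the Kafka output enabled, a quick way to confirm that events are actually arriving is to read a few messages from the topic with Kafka's console consumer (the script path below assumes a Kafka installation under /opt/kafka; adjust to yours):

```bash
/opt/kafka/bin/kafka-console-consumer.sh \
  --bootstrap-server 172.30.196.235:9092 \
  --topic JarLogs \
  --from-beginning --max-messages 5
```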
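If the Redis output is used instead, Filebeat pushes events onto a Redis list under the configured key, so the list can be inspected with redis-cli (host and password are placeholders matching the masked values above):

```bash
# List length and a sample entry in db 9 under the key "filebeat"
redis-cli -h <redis-host> -a '<password>' -n 9 LLEN filebeat
redis-cli -h <redis-host> -a '<password>' -n 9 LRANGE filebeat 0 0
```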