Logstash field type conversion

Project requirement

The data types of fields such as request_time and upstream_response_time need to be changed to float.

1. Configure nginx to log in JSON format

http {
    log_format json '{"@timestamp":"$time_iso8601",'
                    '"server_addr":"$server_addr",'
                    '"remote_addr":"$remote_addr",'
                    '"cookie_JSESSIONID":"$cookie_JSESSIONID",'
                    '"body_bytes_sent":$body_bytes_sent,'
                    '"request_uri":"$request_uri",'
                    '"request_method":"$request_method",'
                    '"server_protocol":"$server_protocol",'
                    '"scheme":"$scheme",'
                    '"request_time":$request_time,'
                    '"upstream_response_time":"$upstream_response_time",'
                    '"upstream_addr":"$upstream_addr",'
                    '"hostname":"$hostname",'
                    '"http_host":"$http_host",'
                    '"uri":"$uri",'
                    '"http_x_forwarded_for":"$http_x_forwarded_for",'
                    '"http_referer":"$http_referer",'
                    '"http_user_agent":"$http_user_agent",'
                    '"X-Forwarded-Proto":"$http_x_forwarded_proto",'
                    '"cookie":"$http_cookie",'
                    '"status":"$status"}';

    server {
        listen 80;
        access_log logs/pc.access.log json;
        # ... remaining server directives ...
    }
}
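With this log_format, each request is written as a single JSON object per line. A hypothetical example line (all values made up for illustration) shows why the quoting differs: $request_time is always numeric, so it is left unquoted and indexes as a number, while $upstream_response_time is quoted because nginx logs "-" when no upstream was involved:

{"@timestamp":"2020-11-18T10:00:00+08:00","server_addr":"10.0.0.1","remote_addr":"10.0.0.2","cookie_JSESSIONID":"-","body_bytes_sent":612,"request_uri":"/index.html","request_method":"GET","server_protocol":"HTTP/1.1","scheme":"http","request_time":0.003,"upstream_response_time":"-","upstream_addr":"-","hostname":"web01","http_host":"example.com","uri":"/index.html","http_x_forwarded_for":"-","http_referer":"-","http_user_agent":"curl/7.29.0","X-Forwarded-Proto":"-","cookie":"-","status":"200"}

That "-" value is exactly what the Logstash filter in step 3 has to replace before converting the field to float.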

2. Collect the logs with Filebeat

filebeat.inputs:
- type: log
  enabled: true
  paths:
    - /data/nginx/*.log
  json.keys_under_root: true
  json.overwrite_keys: true
  encoding: utf-8
  fields:
    document_type: nginx_access_log

output.logstash:
  hosts: ["localhost:5044"]

# To output directly to ES instead:
#output.elasticsearch:
#  hosts: ["192.168.1.30:9200"]
#  index: "nginx-%{+YYYY-MM-dd}"
#setup.template.name: "access"
#setup.template.pattern: "access-*"
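Before starting Filebeat it is worth sanity-checking the file. The commands below assume Filebeat is run from its install directory and the configuration is saved as filebeat.yml (adjust the paths to your setup):

# check the configuration syntax
./filebeat test config -c filebeat.yml
# check connectivity to the configured output (Logstash on localhost:5044)
./filebeat test output -c filebeat.yml
# run in the foreground with logs printed to stderr
./filebeat -e -c filebeat.yml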

3. Logstash configuration

Logstash requires a Java environment.

Configure the startup.options file

LS_HOME=/opt/logstash-6.5.3
LS_SETTINGS_DIR="${LS_HOME}/config"
LS_OPTS="--path.settings ${LS_SETTINGS_DIR}"
LS_PIDFILE=/var/run/logstash.pid
LS_USER=elk
LS_GROUP=elk
LS_GC_LOG_FILE=/opt/logstash-6.5.3/logs/gc.log
LS_OPEN_FILES=16384
LS_NICE=19
SERVICE_NAME="logstash"
SERVICE_DESCRIPTION="logstash"
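startup.options is read by Logstash's service installer rather than at runtime, so after editing it the service definition has to be (re)generated. A typical invocation, assuming the paths above (exact arguments may vary between versions):

/opt/logstash-6.5.3/bin/system-install /opt/logstash-6.5.3/config/startup.options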

Create the template_nginxlog.json index template

{
  "index_patterns" : ["nginx-*"],
  "version" : 60001,
  "settings" : {
    "index.refresh_interval" : "5s",
    "number_of_shards": 1
  },
  "mappings" : {
    "_doc" : {
      "dynamic_templates" : [ {
        "message_field" : {
          "path_match" : "message",
          "match_mapping_type" : "string",
          "mapping" : {
            "type" : "text",
            "norms" : false
          }
        }
      }, {
        "string_fields" : {
          "match" : "*",
          "match_mapping_type" : "string",
          "mapping" : {
            "type" : "text", "norms" : false,
            "fields" : {
              "keyword" : { "type": "keyword", "ignore_above": 2048 }
            }
          }
        }
      } ],
      "properties" : {
        "@timestamp": { "type": "date"},
        "@version": { "type": "keyword"},
        "geoip" : {
          "dynamic": true,
          "properties" : {
            "ip": { "type": "ip" },
            "location" : { "type" : "geo_point" },
            "latitude" : { "type" : "half_float" },
            "longitude" : { "type" : "half_float" }
          }
        }
      }
    }
  }
}
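The template relies on dynamic mapping to type request_time and upstream_response_time; because the Logstash filter converts them to float before indexing, they end up mapped as numeric fields. If you prefer to pin the types explicitly (an optional addition, not part of the original template), entries like the following could be added under "properties":

"request_time": { "type": "float" },
"upstream_response_time": { "type": "float" }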

Write the pipeline .conf file

input {
  beats {
    port => 5044
  }
}

filter {
  if [fields][document_type] == "nginx_access_log" {
    # escape stray \x sequences so the json filter can parse the line
    mutate {
      gsub => ["message", "\\x", "\\\x"]
    }
    json {
      source => "message"
    }

    # nginx logs "-" when no upstream was used; replace it so the float conversion succeeds
    if "-" in [upstream_response_time] {
      mutate {
        replace => {
          "upstream_response_time" => "0"
        }
      }
    }

    mutate {
      convert => [ "upstream_response_time", "float" ]
    }

    # restore escaped quotes and newlines in the cookie field
    mutate {
      gsub => [
        "cookie", "\\x22", '"',
        "cookie", "\\x0A", "\n"
      ]
    }
  }
}

output {
  if [fields][document_type] == "nginx_access_log" {
    elasticsearch {
      hosts => ["192.168.1.30:9200"]
      index => 'nginx-%{+YYYY-MM-dd}'
      template => "/opt/logstash-6.5.3/config/template_nginxlog.json"
      #template_name => "nginxlog"
      template_overwrite => true
      document_type => "_doc"  # must match the mapping type in the template, otherwise ES reports a type mismatch
    }
  }
}
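A quick way to validate and run the pipeline, assuming the file is saved as /opt/logstash-6.5.3/config/nginx.conf (the filename is an assumption; use whatever you named it):

# syntax-check the pipeline and exit
/opt/logstash-6.5.3/bin/logstash -f /opt/logstash-6.5.3/config/nginx.conf --config.test_and_exit
# run the pipeline
/opt/logstash-6.5.3/bin/logstash -f /opt/logstash-6.5.3/config/nginx.conf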

ES error when the document type written by Logstash (doc by default in 6.x) does not match the mapping type in the template (_doc); setting document_type => "_doc" in the elasticsearch output avoids it:

failed to put mappings on indices [[[nginx-2020-11-18/ueZB7rY1QPilUWjk3S9ykg]]], type [doc]java.lang.IllegalArgumentException: Rejecting mapping update to [nginx-2020-11-18] as the final mapping would have more than 1 type: [_doc, doc]
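To confirm the conversion worked, the index mapping can be inspected once some documents have been indexed; request_time and upstream_response_time should show a numeric type. The index name below is the one from the error message above:

curl -s 'http://192.168.1.30:9200/nginx-2020-11-18/_mapping?pretty'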