-
Notifications
You must be signed in to change notification settings - Fork 4
/
nodejs-fluentd-config.yml
164 lines (162 loc) · 4.75 KB
/
nodejs-fluentd-config.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
apiVersion: v1
kind: ConfigMap
metadata:
  name: nodejs-fluentd-config
  namespace: default
data:
  # Main fluentd (td-agent) configuration, mounted into the td-agent container.
  td-agent.conf: |
    ####
    ## Output descriptions:
    ##
    # Treasure Data (http://www.treasure-data.com/) provides cloud based data
    # analytics platform, which easily stores and processes data from td-agent.
    # FREE plan is also provided.
    # @see http://docs.fluentd.org/articles/http-to-td
    #
    # This section matches events whose tag is td.DATABASE.TABLE
    ####
    ## Source descriptions:
    ##
    ## built-in TCP input
    ## @see http://docs.fluentd.org/articles/in_forward
    <source>
      @type forward
      @id input_forward
    </source>
    ## HTTP input: events posted as http://<host>:8889/<tag>
    <source>
      @type http
      @id input_http
      port 8889
    </source>
    ## live debugging agent
    <source>
      @type debug_agent
      @id input_debug_agent
      bind 127.0.0.1
      port 24230
    </source>
    ## tail the Node.js application's JSON log files
    <source>
      @type tail
      format json
      path /var/log/nodejs/log*
      pos_file /var/log/td-agent/pos/nodejs_logs.pos
      read_from_head true
      tag nodejs
    </source>
    ## drop fluentd's own trace-level internal events
    <filter fluent.*>
      @type grep
      <exclude>
        key tag
        pattern /fluent\.trace/
      </exclude>
    </filter>
    ## cast numeric fields so downstream stores map them as numbers
    <filter nodejs>
      @type typecast
      types ContentLength:integer,ResponseTime:float
    </filter>
    ## drop health-check request noise
    <filter nodejs>
      @type grep
      <exclude>
        key Path
        pattern /healthchecks/
      </exclude>
    </filter>
    ## parse the embedded JSON "message" field into top-level record fields
    <filter nodejs>
      @type parser
      key_name message
      reserve_data true
      remove_key_name_field true
      <parse>
        @type json
        json_parser json
      </parse>
    </filter>
    <filter nodejs>
      @type geoip
      # Specify one or more geoip lookup field which has ip address (default: host)
      geoip_lookup_keys IP
      # Specify optional geoip database (using bundled GeoLiteCity database by default)
      # geoip_database "/path/to/your/GeoIPCity.dat"
      # Specify optional geoip2 database
      # geoip2_database "/path/to/your/GeoLite2-City.mmdb" (using bundled GeoLite2-City.mmdb by default)
      # Specify backend library (geoip2_c, geoip, geoip2_compat)
      backend_library geoip2_c
      # Set adding field with placeholder (more than one settings are required.)
      <record>
        city ${city.names.en["IP"]}
        latitude ${location.latitude["IP"]}
        longitude ${location.longitude["IP"]}
        country ${country.iso_code["IP"]}
        country_name ${country.names.en["IP"]}
        postal_code ${postal.code["IP"]}
        region_code ${subdivisions.0.iso_code["IP"]}
        region_name ${subdivisions.0.names.en["IP"]}
        location_properties '{ "lat" : ${location.latitude["IP"]}, "lon" : ${location.longitude["IP"]} }'
        location_string ${location.latitude["IP"]},${location.longitude["IP"]}
        location_array '[${location.longitude["IP"]},${location.latitude["IP"]}]'
      </record>
      # To avoid get stacktrace error with `[null, null]` array for elasticsearch.
      skip_adding_null_record true
      # Set @log_level (default: warn)
      @log_level info
    </filter>
    <match td.*.*>
      @type tdlog
      @id output_td
      # NOTE(review): placeholder credential — inject the real API key from a
      # Kubernetes Secret (env var / mounted file) instead of this ConfigMap.
      apikey YOUR_API_KEY
      auto_create_table
      <buffer>
        @type file
        path /var/log/td-agent/buffer/td
      </buffer>
      <secondary>
        @type file
        path /var/log/td-agent/failed_records
      </secondary>
    </match>
    ## match tag=debug.** and dump to console
    <match debug.**>
      @type stdout
      @id output_stdout
    </match>
    ## ship application events to Elasticsearch and echo them to stdout
    <match nodejs>
      @type copy
      <store>
        @type elasticsearch
        @log_level trace
        time_key @timestamp
        include_timestamp true
        include_tag_key true
        reconnect_on_error true
        reload_on_failure true
        reload_connections false
        request_timeout 120s
        host nodejs-es-internal-http
        port 9200
        scheme https
        # NOTE(review): ssl_verify false disables certificate verification even
        # though a ca_file is supplied — enable once the CA chain is trusted.
        ssl_verify false
        ssl_version TLSv1_2
        ca_file "/fluentd/elastic/tls.crt"
        user elastic
        # NOTE(review): plaintext password committed in a ConfigMap — move it to
        # a Kubernetes Secret and reference it via an environment variable.
        password P@$$w0rd
        flush_interval 1s
        # index_name fluentd.${tag}.%Y%m%d
        type_name _doc
        logstash_format true  # Setting this option to true will ignore the index_name setting. The default index name prefix is logstash-.
        logstash_prefix nodejsrestapi.logs
        template_name access_log_template
        template_file /tmp/access_log_template.json
        template_overwrite true
        <buffer tag, time>
          timekey 60  # one chunk per minute ("3600" also available for hourly chunks)
        </buffer>
      </store>
      <store>
        @type stdout
      </store>
    </match>
    ## events that raised errors during processing are routed here
    <label @ERROR>
      <match **>
        @type stdout
      </match>
    </label>