blob: 3b4fd768c3502ce687b555d098f1421d0b0141d0 [file] [log] [blame]
input {
  beats {

    ## Unique id for this plugin instance; shows up in logs and the monitoring API.
    id => 'beats_plugin'

    ######## Connection configurations ########

    ## Port to listen on (injected by Helm at deploy time).
    port => {{.Values.service.externalPort}}

    ## Close idle clients after the specified time in seconds. Default is 60 seconds.
    #client_inactivity_timeout => 60

    ######## Security configurations ########

    ## Enable encryption. Default false.
    #ssl => $filebeat_ssl

    ## SSL certificate path.
    #ssl_certificate => $filebeat_ssl_certificate

    ## SSL key to use.
    #ssl_key => $filebeat_ssl_key

    ## SSL key passphrase to use.
    #ssl_key_passphrase => $filebeat_ssl_key_passphrase

    ## Value can be any of: none, peer, force_peer.
    #ssl_verify_mode => $filebeat_ssl_verify_mode

    ## Time in milliseconds for an incomplete ssl handshake to timeout. Default is 10000 ms.
    #ssl_handshake_timeout => 10000

    ## Do not tag events with the beats codec identifier.
    include_codec_tag => false
  }
}
37
38
filter {
  # log4j XML events carry a closing </log4j:event> tag; everything else
  # is handled by the plain-text (logback) branch below.
  if "</log4j:event>" in [message] {
    # Parse the XML event and pull named fields out via xpath.
    xml {
      source => "message"
      store_xml => false
      remove_namespaces => true
      target => "xml_content"
      xpath => [ "/event/message/text()", "logmsg" ,
                 "/event/@logger", "Logger",
                 "/event/@timestamp", "Timestamp",
                 "/event/@level", "loglevel",
                 "/event/@thread", "Thread",
                 "/event/throwable/text()", "Exceptionthrowable",
                 "/event/NDC/text()", "NDCs",
                 "/event/properties/data/@name","mdcname",
                 "/event/properties/data/@value","mdcvalue"]
    }

    # Promote each MDC name/value pair (parallel arrays produced by the
    # xpath extraction above) to a top-level field on the event.
    #
    # FIX: the original used Ruby GLOBAL variables ($i/$num). Globals are
    # shared across pipeline worker threads and persist between events,
    # which is a race condition under multi-worker pipelines; locals are
    # scoped to the per-event execution. Also tightened the loop bound:
    # "until $i > $num" iterated one index past the end (harmless only
    # because of the nil-guard).
    ruby {
      code => '
        num = 0
        if event.get("[mdcname]")
          num = event.get("[mdcname]").length
        end
        i = 0
        until i >= num do
          if event.get("[mdcname]").at(i) and event.get("[mdcvalue]").at(i)
            event.set(event.get("[mdcname]").at(i), event.get("[mdcvalue]").at(i))
          end
          i = i + 1
        end
      '
    }

    # Copy optional xpath results into their final field names only when
    # present, so absent fields do not become literal "%{...}" strings.
    if [Exceptionthrowable]
    {
      mutate {
        replace => {
          "exceptionmessage" => "%{[Exceptionthrowable]}"
        }
      }
    }

    if [NDCs]
    {
      mutate {
        replace => {
          "NDC" => "%{[NDCs]}"
        }
      }
    }

    # Flatten the single-element xpath arrays into scalar fields and drop
    # the intermediate extraction fields.
    mutate {
      replace => {
        "Logger" =>"%{[Logger]}"
        "logmsg" =>"%{[logmsg]}"
        "Timestamp" =>"%{[Timestamp]}"
        "loglevel" =>"%{[loglevel]}"
        "message" => "%{logmsg}"
        "Thread" => "%{[Thread]}"
      }
      remove_field => ["mdcname", "mdcvalue", "logmsg","Exceptionthrowable","NDCs"]
    }

    # log4j timestamps are epoch milliseconds.
    if [Timestamp]
    {
      date {
        match => ["Timestamp", "UNIX_MS"]
        target => "Timestamp"
      }
    }
  }
  # Plain-text (logback) events.
  else {

#    mutate { add_field => { "orgmsg" => "%{message}" } } # Copy of orginal msg for debug

    # Normalize key=value separators so the kv filter below can split them:
    # collapse spaces around "=", and mark empty values as "null".
    mutate {
      gsub => [
        'message', ' = ', '=',
        'message', '= ', '=null',
        'message', '=\t', '=null ', #This null is followed by a tab
        'message', '\t$', '\t'
      ]
    }
    grok {
      break_on_match => false
      match => {
        "message" => ["%{TIMESTAMP_ISO8601:Timestamp}\t%{GREEDYDATA:Thread}\t%{SPACE}%{LOGLEVEL:loglevel}%{SPACE}\t%{JAVACLASS:Logger}\t(?:[^\t]+\t)*%{GREEDYDATA:message}",
                      "(?<MDCs>.*\t)"
                     ]
        # Derive the ONAP component name and log file from the source path.
        "source" => ["/var/log/onap/(?<componentName>[^/]+)/",
                     "/var/log/onap/%{GREEDYDATA:componentLogFile}"
                    ]
      }
      overwrite => ["message"]
    }
    # Split the tab-separated MDC blob captured above into individual fields.
    kv {
      source => "MDCs"
      field_split => "\t"
      trim_key => "\s"
      trim_value => "\s"
      remove_field => [ "MDCs" ]
    }

    date {
      match => [ "Timestamp", "ISO8601", "yyyy-MM-dd HH:mm:ss,SSS" ]
      target => "Timestamp"
    }

    # AAI metrics/audit logs are pipe-separated with backtick quoting.
    if [source] == "/var/log/onap/aai/aai-ml/metrics.log" {
      csv {
        source => "message"
        separator => "|"
        quote_char => "`"
        columns => ["Begin TS", "End TS", "DuplicateRequestID", "Unknown1", "threadID", "phys/virt server name", "service name", "Partner Name", "Unknown2", "Unknown3", "Unknown4", "Unknown5", "Unknown6", "Unknown7", "Log level", "Unknown8", "Unknown9", "Status code", "Server", "Unknown10", "Unknown11", "Unknown12", "Unknown13", "Unknown14", "Unknown15", "Unknown16", "Unknown17", "Unknown18", "message"]
      }
    }
    else if [source] == "/var/log/onap/aai/aai-ml/audit.log" {
      csv {
        source => "message"
        separator => "|"
        quote_char => "`"
        columns => ["Begin TS", "End TS", "DuplicateRequestID", "Unknown1", "threadID", "phys/virt server name", "service name", "Partner Name", "Unknown2", "Unknown3", "Unknown4", "Unknown5", "Log level", "Unknown6", "Unknown7", "Status code", "Server", "Unknown10", "Unknown11", "Unknown12", "Unknown13", "Unknown14", "Unknown15", "Unknown16", "Unknown17", "message"]
      }
    }

    # Drop the placeholder CSV columns we never index.
    mutate {
      remove_field => ["DuplicateRequestID", "Unknown1", "Unknown2", "Unknown3", "Unknown4", "Unknown5", "Unknown6", "Unknown7", "Unknown8", "Unknown9", "Unknown10", "Unknown11", "Unknown12", "Unknown13", "Unknown14", "Unknown15", "Unknown16", "Unknown17", "Unknown18"]
    }

    if ([source] == "/var/log/onap/sdc/sdc-be/audit.log") {
      # Parse kvps in message
      kv {
        field_split => "\s"
        trim_key => "\s"
        trim_value => "\s"
      }

      # If RequestId is missing and DID is present, use DID as RequestId.
      if (![RequestId] and [DID] =~ /.+/) {
        mutate { add_field => { "RequestId" => "%{DID}" } }
      }
    }

  } #Close else statement for logback events
} #Close filter
Itay Hasside2da86d2017-08-24 12:54:42 +0000193
194
output {
  elasticsearch {
    id => 'onap_es'

    ######### Security configurations #########

    # NOTE(review): credentials are hard-coded defaults here; they should be
    # injected from a secret rather than committed in the pipeline config.
    user => "elastic"
    password => "changeme"

    ## The .cer or .pem file to validate the server's certificate
    #cacert => $es_cacert

    ## The keystore used to present a certificate to the server. It can be either .jks or .p12
    #keystore => $es_keystore
    #keystore_password => $es_keystore_password

    ## Enable SSL/TLS secured communication to the Elasticsearch cluster.
    ## Default is not set, in which case it depends on the protocol specified in the hosts list.
    #ssl => $es_ssl

    ## Option to validate the server's certificate. Default is true
    #ssl_certificate_verification => $es_ssl_certificate_verification

    ## The JKS truststore to validate the server's certificate.
    #truststore => $es_truststore
    #truststore_password => $es_truststore_password


    ######### Elasticsearch cluster and host configurations #########

    ## One host or a list of hosts may be given. With sniffing enabled, one is
    ## enough and the rest are auto-discovered.
    hosts => ["http://{{.Values.config.elasticsearchServiceName}}.{{.Release.Namespace}}:{{.Values.config.elasticsearchPort}}"]


    ## Ask Elasticsearch for the list of all cluster nodes and add them to the hosts list. Default is false.
    sniffing => true

    ## How long to wait, in seconds, between sniffing attempts. Default is 5 seconds.
    #sniffing_delay => 5

    ## Set the address of a forward HTTP proxy.
    #proxy => $es_proxy

    ## Use this if Elasticsearch runs behind a proxy that remaps the root path of its HTTP API.
    #path => $es_path

    ######### Elasticsearch request configurations #########

    ## Maximum sized bulk request Logstash will make.
    #flush_size => ?

    ######### Document configurations #########

    ## Daily index, e.g. logstash-2018.03.25.
    index => "logstash-%{+YYYY.MM.dd}"
    document_type => "logs"

    ## Can be used to associate child documents with a parent via the parent ID.
    #parent => "abcd"
  }
}
255