{{/*
# Copyright © 2018 AT&T, Amdocs, Bell Canada Intellectual Property. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
*/}}
input {
  beats {

    ## Unique identifier for this plugin instance; surfaces in the monitoring API and logs.
    id => 'beats_plugin'

    ######## Connection configurations ########

    ## TCP port to listen on for incoming Beats (filebeat) connections.
    ## Rendered from the chart's service definition.
    port => {{.Values.service.externalPort}}

    ## Close idle clients after the specified time in seconds. Default is 60 seconds.
    #client_inactivity_timeout => 60

    ######## Security configurations ########

    ## Enable encryption. Default false.
    #ssl => $filebeat_ssl

    ## SSL certificate path.
    #ssl_certificate => $filebeat_ssl_certificate

    ## SSL key to use.
    #ssl_key => $filebeat_ssl_key

    ## SSL key passphrase to use.
    #ssl_key_passphrase => $filebeat_ssl_key_passphrase

    ## Value can be any of: none, peer, force_peer.
    #ssl_verify_mode => $filebeat_ssl_verify_mode

    ## Time in milliseconds for an incomplete ssl handshake to timeout. Default is 10000 ms.
    #ssl_handshake_timeout => 10000

    ## Do not tag events with the name of the codec the beats input applied.
    include_codec_tag => false
  }
}
52
53
filter {
  ## Derive componentName (directory under /var/log/onap) and componentLogFile
  ## from the path filebeat shipped in the "source" field. break_on_match=false
  ## so both patterns get a chance to contribute fields.
  grok {
    break_on_match => false
    match => {
      "source" => ["/var/log/onap/(?<componentName>[^/]+)/",
                   "/var/log/onap/%{GREEDYDATA:componentLogFile}"
                  ]
    }
  }

  ## Branch 1: log4j XML events (detected by the closing log4j tag in the payload).
  if "</log4j:event>" in [message] {

    #mutate { add_field => { "orgmsg_log4j" => "%{message}" } } # Copy of orginal msg for debug

    ## Parse the XML event and pull the interesting attributes/elements out
    ## via xpath; the raw parsed tree itself is not stored (store_xml => false).
    xml {
      source => "message"
      store_xml => false
      remove_namespaces => true
      target => "xml_content"
      xpath => [ "/event/message/text()", "logmsg" ,
                 "/event/@logger", "Logger",
                 "/event/@timestamp", "Timestamp",
                 "/event/@level", "loglevel",
                 "/event/@thread", "Thread",
                 "/event/throwable/text()", "Exceptionthrowable",
                 "/event/NDC/text()", "NDCs",
                 "/event/properties/data/@name","mdcname",
                 "/event/properties/data/@value","mdcvalue"]
    }

    ## Walk the parallel mdcname/mdcvalue arrays and promote each MDC pair
    ## to a top-level field on the event.
    ruby {
      code => '
                $i = 0
                $num = 0
                if event.get("[mdcname]")
                  $num = event.get("[mdcname]").length
                end
                if $num != 0
                  until $i > $num do
                    if event.get("[mdcname]").at($i) and event.get("[mdcvalue]").at($i)
                      event.set(event.get("[mdcname]").at($i), event.get("[mdcvalue]").at($i))
                    end
                    $i=$i+1
                  end
                end
              '
    }

    ## Flatten optional single-element xpath results into scalar fields.
    if [Exceptionthrowable]
    {
      mutate {
        replace => {
          "exceptionmessage" => "%{[Exceptionthrowable]}"
        }
      }
    }

    if [NDCs]
    {
      mutate {
        replace => {
          "NDC" => "%{[NDCs]}"
        }
      }
    }

    ## Normalize the remaining xpath array fields to scalars, make logmsg the
    ## event message, and drop the intermediate fields.
    mutate {
      replace => {
        "Logger" =>"%{[Logger]}"
        "logmsg" =>"%{[logmsg]}"
        "Timestamp" =>"%{[Timestamp]}"
        "loglevel" =>"%{[loglevel]}"
        "message" => "%{logmsg}"
        "Thread" => "%{[Thread]}"
      }
      remove_field => ["mdcname", "mdcvalue", "logmsg","Exceptionthrowable","NDCs"]
    }

    ## log4j timestamps are epoch milliseconds.
    if [Timestamp]
    {
      date {
        match => ["Timestamp", "UNIX_MS"]
        target => "Timestamp"
      }
    }
  }
  ## Branch 2: logback (plain text, pipe/tab delimited) events.
  else {

    #mutate { add_field => { "orgmsg" => "%{message}" } } # Copy of orginal msg for debug

    ## Normalize key/value separators so the kv filters below can parse them:
    ## tighten " = " to "=", and give empty values an explicit "null".
    mutate {
      gsub => [
        'message', ' = ', '=',
        'message', '= ', '=null',
        'message', '=\t', '=null ', #This null is followed by a tab
        'message', '\t$', '\t'
      ]
    }
    # The grok below parses the message field for all current logback patterns used by oom components.
    # Example logback pattern: %d{&quot;yyyy-MM-dd'T'HH:mm:ss.SSSXXX&quot;, UTC}|%X{RequestId}|%msg
    # Example grok pattern: %{TIMESTAMP_ISO8601:Timestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:message}
    # Use the following command to find all logback patterns in oom directory: find oom -name "logback*xml" -exec grep "property.*attern.*value" {} \;|sort|uniq
    grok {
      match => {
        "message" => [
          "%{TIMESTAMP_ISO8601:Timestamp}\\t[%{GREEDYDATA:Thread}]\\t%{GREEDYDATA:loglevel}\\t%{JAVACLASS:Logger}\\t%{GREEDYDATA:MDCs}\\t%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:BeginTimestamp}\|%{TIMESTAMP_ISO8601:EndTimestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:ServiceInstanceId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:Unknown1}\|%{GREEDYDATA:ServiceName}\|%{GREEDYDATA:PartnerName}\|%{GREEDYDATA:TargetEntity}\|%{GREEDYDATA:TargetServiceName}\|%{GREEDYDATA:StatusCode}\|%{GREEDYDATA:ResponseCode}\|%{GREEDYDATA:ResponseDesc}\|%{UUID:InstanceUUID}\|%{GREEDYDATA:loglevel}\|%{GREEDYDATA:AlertSeverity}\|%{IP:ServerIPAddress}\|%{GREEDYDATA:Timer}\|%{HOSTNAME:ServerFQDN}\|%{IPORHOST:RemoteHost}\|%{GREEDYDATA:Unknown2}\|%{GREEDYDATA:Unknown3}\|%{GREEDYDATA:Unknown4}\|%{GREEDYDATA:TargetVirtualEntity}\|%{GREEDYDATA:Unknown5}\|%{GREEDYDATA:Unknown6}\|%{GREEDYDATA:Unknown7}\|%{GREEDYDATA:Unknown8}\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:BeginTimestamp}\|%{TIMESTAMP_ISO8601:EndTimestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:ServiceInstanceId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:Unknown1}\|%{GREEDYDATA:ServiceName}\|%{GREEDYDATA:PartnerName}\|%{GREEDYDATA:StatusCode}\|%{GREEDYDATA:ResponseCode}\|%{GREEDYDATA:ResponseDesc}\|%{UUID:InstanceUUID}\|%{GREEDYDATA:loglevel}\|%{GREEDYDATA:AlertSeverity}\|%{IP:ServerIPAddress}\|%{GREEDYDATA:Timer}\|%{HOSTNAME:ServerFQDN}\|%{IPORHOST:RemoteHost}\|%{GREEDYDATA:Unknown2}\|%{GREEDYDATA:Unknown3}\|%{GREEDYDATA:Unknown4}\|%{GREEDYDATA:Unknown5}\|%{GREEDYDATA:Unknown6}\|%{GREEDYDATA:Unknown7}\|%{GREEDYDATA:Unknown8}\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:Timestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:ServiceInstanceId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:ServiceName}\|%{UUID:InstanceUUID}\|%{GREEDYDATA:loglevel}\|%{GREEDYDATA:AlertSeverity}\|%{IP:ServerIPAddress}\|%{HOSTNAME:ServerFQDN}\|%{IPORHOST:RemoteHost}\|%{GREEDYDATA:Timer}\|\[%{GREEDYDATA:caller}\]\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:Timestamp}\|%{GREEDYDATA:RequestId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:ServiceName}\|%{GREEDYDATA:PartnerName}\|%{GREEDYDATA:TargetEntity}\|%{GREEDYDATA:TargetServiceName}\|%{GREEDYDATA:loglevel}\|%{GREEDYDATA:ErrorCode}\|%{GREEDYDATA:ErrorDesc}\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:Timestamp}\|%{GREEDYDATA:RequestId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:ClassName}\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:Timestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:message}",
          "\[%{TIMESTAMP_ISO8601:Timestamp}\|%{LOGLEVEL:loglevel}\|%{GREEDYDATA:Logger}\|%{GREEDYDATA:Thread}\] %{GREEDYDATA:message}"
        ]
      }
      overwrite => ["message"]
    }
    # The MDCs are key value pairs that are seperated by "," or "\t". Extra space characters are trimmed from the keys and values.
    kv {
      source => "MDCs"
      field_split => ",\t"
      trim_key => "\s"
      trim_value => "\s"
      remove_field => [ "MDCs" ]
    }

    ## Audit/metric records carry Begin/End timestamps only; use EndTimestamp
    ## as the canonical Timestamp when none was parsed.
    if (![Timestamp] and [EndTimestamp]) {
      mutate { add_field => { "Timestamp" => "%{EndTimestamp}" } }
    }
    date {
      match => [ "Timestamp", "ISO8601", "yyyy-MM-dd HH:mm:ss,SSS" ]
      target => "Timestamp"
    }

    ## Drop positional fields we matched but do not index.
    mutate {
      remove_field => ["DuplicateRequestID", "Unknown1", "Unknown2", "Unknown3", "Unknown4", "Unknown5", "Unknown6", "Unknown7", "Unknown8"]
    }

    ## SDC backend audit log: the message body itself is whitespace-separated
    ## key=value pairs.
    if ([source] == "/var/log/onap/sdc/sdc-be/audit.log") {
      #Parse kvps in message
      kv {
        field_split => "\s"
        trim_key => "\s"
        trim_value => "\s"
      }

      #If Request Id is missing and DID is present use as RequestId
      if (![RequestId] and [DID] =~ /.+/) {
        mutate { add_field => { "RequestId" => "%{DID}" } }
      }
    }

  } #Close else statement for logback events
} #Close filter
Itay Hasside2da86d2017-08-24 12:54:42 +0000214
215
output {
  elasticsearch {
    id => 'onap_es'

    ######### Security configurations #########

    user => "elastic"
    password => "changeme"

    ## The .cer or .pem file to validate the server's certificate
    #cacert => $es_cacert

    ## The keystore used to present a certificate to the server. It can be either .jks or .p12
    #keystore => $es_keystore
    #keystore_password => $es_keystore_password

    ## Enable SSL/TLS secured communication to Elasticsearch cluster.
    ## Default is not set, in which case it depends on the protocol specified in the hosts list.
    #ssl => $es_ssl

    ## Option to validate the server's certificate. Default is true
    #ssl_certificate_verification => $es_ssl_certificate_verification

    ## The JKS truststore to validate the server's certificate.
    #truststore => $es_truststore
    #truststore_password => $es_truststore_password


    ######### Elasticsearch cluster and host configurations #########

    ## Can specify one or a list of hosts. If sniffing is set, one is enough and others will be auto-discovered.
    ## Host, namespace and port are rendered from the chart values.
    hosts => ["http://{{.Values.config.elasticsearchServiceName}}.{{.Release.Namespace}}:{{.Values.config.elasticsearchPort}}"]


    ## This setting asks Elasticsearch for the list of all cluster nodes and adds them to the hosts list. Default is false.
    sniffing => true

    ## How long to wait, in seconds, between sniffing attempts. Default is 5 seconds.
    #sniffing_delay => 5

    ## Set the address of a forward HTTP proxy.
    #proxy => $es_proxy

    ## Use this if you must run Elasticsearch behind a proxy that remaps the root path for the Elasticsearch HTTP API.
    #path => $es_path

    ######### Elasticsearch request configurations #########

    ## This setting defines the maximum sized bulk request Logstash will make.
    #flush_size => ?

    ######### Document configurations #########

    ## One index per day, matching the default Kibana "logstash-*" pattern.
    index => "logstash-%{+YYYY.MM.dd}"
    document_type => "logs"

    ## This can be used to associate child documents with a parent using the parent ID.
    #parent => "abcd"
  }
}
276