# Copyright © 2018 AT&T, Amdocs, Bell Canada Intellectual Property. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
input {
  beats {

    ## Add an id to the plugin configuration. Can be anything unique.
    id => 'beats_plugin'

    ######## Connection configurations ########

    ## The port to listen on.
    port => {{.Values.service.externalPort}}

    ## Close idle clients after the specified time in seconds. Default is 60 seconds.
    #client_inactivity_timeout => 60

    ######## Security configurations ########

    ## Enable encryption. Default false.
    #ssl => $filebeat_ssl

    ## SSL certificate path.
    #ssl_certificate => $filebeat_ssl_certificate

    ## SSL key to use.
    #ssl_key => $filebeat_ssl_key

    ## SSL key passphrase to use.
    #ssl_key_passphrase => $filebeat_ssl_key_passphrase

    ## Value can be any of: none, peer, force_peer.
    #ssl_verify_mode => $filebeat_ssl_verify_mode

    ## Time in milliseconds for an incomplete ssl handshake to timeout. Default is 10000 ms.
    #ssl_handshake_timeout => 10000
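
    ## Example (hypothetical file paths, for illustration only) of a TLS-enabled beats input:
    ##   ssl => true
    ##   ssl_certificate => "/usr/share/logstash/config/certs/logstash.crt"
    ##   ssl_key => "/usr/share/logstash/config/certs/logstash.key"
    ##   ssl_verify_mode => "none"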

    include_codec_tag => false
  }
}


filter {
  grok {
    break_on_match => false
    match => {
      "source" => ["/var/log/onap/(?<componentName>[^/]+)/",
                   "/var/log/onap/%{GREEDYDATA:componentLogFile}"
                  ]
    }
  }
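
  # For example, a source of "/var/log/onap/aai/aai-resources/error.log" yields
  # componentName "aai" and componentLogFile "aai/aai-resources/error.log";
  # with break_on_match set to false both patterns above are applied.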

  # Filter for log4j xml events
  if "</log4j:event>" in [message] {

    #mutate { add_field => { "orgmsg_log4j" => "%{message}" } } # Copy of original msg for debug

    # Filter to parse the xml event and retrieve data
    xml {
      source => "message"
      store_xml => false
      remove_namespaces => true
      target => "xml_content"
      xpath => [ "/event/message/text()", "logmsg",
                 "/event/@logger", "Logger",
                 "/event/@timestamp", "Timestamp",
                 "/event/@level", "loglevel",
                 "/event/@thread", "Thread",
                 "/event/throwable/text()", "Exceptionthrowable",
                 "/event/NDC/text()", "NDCs",
                 "/event/properties/data/@name", "mdcname",
                 "/event/properties/data/@value", "mdcvalue" ]
    }
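
    # For example, a log4j event such as (a made-up sample)
    #   <log4j:event logger="org.onap.example.Foo" level="INFO" thread="main" timestamp="1535961600000">
    #     <log4j:message>started</log4j:message>
    #   </log4j:event>
    # would populate Logger, loglevel, Thread, Timestamp and logmsg;
    # remove_namespaces => true is what lets the /event/... xpath expressions match log4j:event.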

    # Ruby filter to iterate over the MDC name/value pairs and add them to the event as separate fields
    ruby {
      code => '
        # mdcname and mdcvalue are parallel arrays extracted by the xml filter above.
        # Local variables (rather than globals) keep this safe when the pipeline
        # runs with multiple worker threads.
        names = event.get("[mdcname]")
        values = event.get("[mdcvalue]")
        if names and values
          i = 0
          while i < names.length do
            if names.at(i) and values.at(i)
              event.set(names.at(i), values.at(i))
            end
            i = i + 1
          end
        end
      '
    }
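
    # For example, mdcname ["RequestId"] and mdcvalue ["abc-123"] (illustrative values)
    # would produce a top-level event field RequestId => "abc-123".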

    # Validations
    if [Exceptionthrowable]
    {
      mutate {
        replace => {
          "exceptionmessage" => "%{[Exceptionthrowable]}"
        }
      }
    }

    if [NDCs]
    {
      mutate {
        replace => {
          "NDC" => "%{[NDCs]}"
        }
      }
    }

    mutate {
      replace => {
        "Logger" => "%{[Logger]}"
        "logmsg" => "%{[logmsg]}"
        "Timestamp" => "%{[Timestamp]}"
        "loglevel" => "%{[loglevel]}"
        "message" => "%{logmsg}"
        "Thread" => "%{[Thread]}"
      }
      remove_field => ["mdcname", "mdcvalue", "logmsg", "Exceptionthrowable", "NDCs"]
    }

    if [Timestamp]
    {
      date {
        match => ["Timestamp", "UNIX_MS"]
        target => "Timestamp"
      }
    }
  }
  # Filter for logback events
  else {

    #mutate { add_field => { "orgmsg" => "%{message}" } } # Copy of original msg for debug

    mutate {
      gsub => [
        'message', ' = ', '=',
        'message', '= ', '=null',
        'message', '=\t', '=null ', # This null is followed by a tab
        'message', '\t$', '\t'
      ]
    }
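
    # For example (illustrative message fragment), "RequestId = abc-123, PartnerName= "
    # becomes "RequestId=abc-123, PartnerName=null", so empty values do not break the
    # key/value parsing further below.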
    # The grok below parses the message field for all current logback patterns used by oom components.
    # Example logback pattern: %d{&quot;yyyy-MM-dd'T'HH:mm:ss.SSSXXX&quot;, UTC}|%X{RequestId}|%msg
    # Example grok pattern: %{TIMESTAMP_ISO8601:Timestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:message}
    # Use the following command to find all logback patterns in the oom directory: find oom -name "logback*xml" -exec grep "property.*attern.*value" {} \;|sort|uniq
    grok {
      match => {
        "message" => [
          "%{TIMESTAMP_ISO8601:Timestamp}\\t[%{GREEDYDATA:Thread}]\\t%{GREEDYDATA:loglevel}\\t%{JAVACLASS:Logger}\\t%{GREEDYDATA:MDCs}\\t%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:BeginTimestamp}\|%{TIMESTAMP_ISO8601:EndTimestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:ServiceInstanceId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:Unknown1}\|%{GREEDYDATA:ServiceName}\|%{GREEDYDATA:PartnerName}\|%{GREEDYDATA:TargetEntity}\|%{GREEDYDATA:TargetServiceName}\|%{GREEDYDATA:StatusCode}\|%{GREEDYDATA:ResponseCode}\|%{GREEDYDATA:ResponseDesc}\|%{UUID:InstanceUUID}\|%{GREEDYDATA:loglevel}\|%{GREEDYDATA:AlertSeverity}\|%{IP:ServerIPAddress}\|%{GREEDYDATA:Timer}\|%{HOSTNAME:ServerFQDN}\|%{IPORHOST:RemoteHost}\|%{GREEDYDATA:Unknown2}\|%{GREEDYDATA:Unknown3}\|%{GREEDYDATA:Unknown4}\|%{GREEDYDATA:TargetVirtualEntity}\|%{GREEDYDATA:Unknown5}\|%{GREEDYDATA:Unknown6}\|%{GREEDYDATA:Unknown7}\|%{GREEDYDATA:Unknown8}\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:BeginTimestamp}\|%{TIMESTAMP_ISO8601:EndTimestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:ServiceInstanceId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:Unknown1}\|%{GREEDYDATA:ServiceName}\|%{GREEDYDATA:PartnerName}\|%{GREEDYDATA:StatusCode}\|%{GREEDYDATA:ResponseCode}\|%{GREEDYDATA:ResponseDesc}\|%{UUID:InstanceUUID}\|%{GREEDYDATA:loglevel}\|%{GREEDYDATA:AlertSeverity}\|%{IP:ServerIPAddress}\|%{GREEDYDATA:Timer}\|%{HOSTNAME:ServerFQDN}\|%{IPORHOST:RemoteHost}\|%{GREEDYDATA:Unknown2}\|%{GREEDYDATA:Unknown3}\|%{GREEDYDATA:Unknown4}\|%{GREEDYDATA:Unknown5}\|%{GREEDYDATA:Unknown6}\|%{GREEDYDATA:Unknown7}\|%{GREEDYDATA:Unknown8}\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:Timestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:ServiceInstanceId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:ServiceName}\|%{UUID:InstanceUUID}\|%{GREEDYDATA:loglevel}\|%{GREEDYDATA:AlertSeverity}\|%{IP:ServerIPAddress}\|%{HOSTNAME:ServerFQDN}\|%{IPORHOST:RemoteHost}\|%{GREEDYDATA:Timer}\|\[%{GREEDYDATA:caller}\]\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:Timestamp}\|%{GREEDYDATA:RequestId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:ServiceName}\|%{GREEDYDATA:PartnerName}\|%{GREEDYDATA:TargetEntity}\|%{GREEDYDATA:TargetServiceName}\|%{GREEDYDATA:loglevel}\|%{GREEDYDATA:ErrorCode}\|%{GREEDYDATA:ErrorDesc}\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:Timestamp}\|%{GREEDYDATA:RequestId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:ClassName}\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:Timestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:message}",
          "\[%{TIMESTAMP_ISO8601:Timestamp}\|%{LOGLEVEL:loglevel}\|%{GREEDYDATA:Logger}\|%{GREEDYDATA:Thread}\] %{GREEDYDATA:message}"
        ]
      }
      overwrite => ["message"]
    }
    # The MDCs are key/value pairs separated by "," or "\t". Extra space characters are trimmed from the keys and values.
    kv {
      source => "MDCs"
      field_split => ",\t"
      trim_key => "\s"
      trim_value => "\s"
      remove_field => [ "MDCs" ]
    }
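
    # For example, an MDCs value of "RequestId=abc-123, InvocationID=xyz-789" (illustrative)
    # yields the event fields RequestId => "abc-123" and InvocationID => "xyz-789".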

    if (![Timestamp] and [EndTimestamp]) {
      mutate { add_field => { "Timestamp" => "%{EndTimestamp}" } }
    }
    date {
      match => [ "Timestamp", "ISO8601", "yyyy-MM-dd HH:mm:ss,SSS" ]
      target => "Timestamp"
    }

    mutate {
      remove_field => ["DuplicateRequestID", "Unknown1", "Unknown2", "Unknown3", "Unknown4", "Unknown5", "Unknown6", "Unknown7", "Unknown8"]
    }

    if ([source] == "/var/log/onap/sdc/sdc-be/audit.log") {
      # Parse the key=value pairs in the message
      kv {
        field_split => "\s"
        trim_key => "\s"
        trim_value => "\s"
      }

      # If RequestId is missing and DID is present, use DID as the RequestId
      if (![RequestId] and [DID] =~ /.+/) {
        mutate { add_field => { "RequestId" => "%{DID}" } }
      }
    }

  } # Close else statement for logback events
} # Close filter


output {
  elasticsearch {
    id => 'onap_es'

    ######### Security configurations #########

    user => "elastic"
    password => "changeme"

    ## The .cer or .pem file to validate the server's certificate
    #cacert => $es_cacert

    ## The keystore used to present a certificate to the server. It can be either .jks or .p12
    #keystore => $es_keystore
    #keystore_password => $es_keystore_password

    ## Enable SSL/TLS secured communication to the Elasticsearch cluster.
    ## Default is not set, in which case it depends on the protocol specified in the hosts list.
    #ssl => $es_ssl

    ## Option to validate the server's certificate. Default is true.
    #ssl_certificate_verification => $es_ssl_certificate_verification

    ## The JKS truststore to validate the server's certificate.
    #truststore => $es_truststore
    #truststore_password => $es_truststore_password
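
    ## Example (hypothetical path, for illustration only) of enabling TLS towards Elasticsearch:
    ##   ssl => true
    ##   cacert => "/usr/share/logstash/config/certs/es-ca.pem"
    ##   ssl_certificate_verification => true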


    ######### Elasticsearch cluster and host configurations #########

    ## Can specify one host or a list of hosts. If sniffing is set, one is enough and the others will be auto-discovered.
    hosts => ["http://{{.Values.config.elasticsearchServiceName}}.{{.Release.Namespace}}:{{.Values.config.elasticsearchPort}}"]


    ## This setting asks Elasticsearch for the list of all cluster nodes and adds them to the hosts list. Default is false.
    sniffing => true

    ## How long to wait, in seconds, between sniffing attempts. Default is 5 seconds.
    #sniffing_delay => 5

    ## Set the address of a forward HTTP proxy.
    #proxy => $es_proxy

    ## Use this if you must run Elasticsearch behind a proxy that remaps the root path under which the Elasticsearch HTTP API lives.
    #path => $es_path

    ######### Elasticsearch request configurations #########

    ## This setting defines the maximum size of the bulk requests Logstash will make.
    #flush_size => ?

    ######### Document configurations #########

    index => "logstash-%{+YYYY.MM.dd}"
    document_type => "logs"

    ## This can be used to associate child documents with a parent using the parent ID.
    #parent => "abcd"
  }
}