input {
  beats {

    ## Add an id to the plugin configuration. Can be anything unique.
    id => 'beats_plugin'

    ######## Connection configurations ########

    ## The port to listen on.
    port => {{.Values.service.externalPort}}

    ## Close idle clients after the specified time in seconds. Default is 60 seconds.
    #client_inactivity_timeout => 60

    ######## Security configurations ########

    ## Enable encryption. Default false.
    #ssl => $filebeat_ssl

    ## SSL certificate path.
    #ssl_certificate => $filebeat_ssl_certificate

    ## SSL key to use.
    #ssl_key => $filebeat_ssl_key

    ## SSL key passphrase to use.
    #ssl_key_passphrase => $filebeat_ssl_key_passphrase

    ## Value can be any of: none, peer, force_peer.
    #ssl_verify_mode => $filebeat_ssl_verify_mode

    ## Time in milliseconds for an incomplete SSL handshake to time out. Default is 10000 ms.
    #ssl_handshake_timeout => 10000
    include_codec_tag => false
  }
}


filter {
  grok {
    break_on_match => false
    match => {
      "source" => ["/var/log/onap/(?<componentName>[^/]+)/",
                   "/var/log/onap/%{GREEDYDATA:componentLogFile}"
                  ]
    }
  }
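  # For illustration only (hypothetical path): a "source" of
  # "/var/log/onap/aai/aai-resources/error.log" would yield
  # componentName => "aai" and componentLogFile => "aai/aai-resources/error.log".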

  # Filter for log4j xml events
  if "</log4j:event>" in [message] {

    #mutate { add_field => { "orgmsg_log4j" => "%{message}" } } # Copy of original msg for debugging

    # Filter to parse the xml event and retrieve its data
    xml {
      source => "message"
      store_xml => false
      remove_namespaces => true
      target => "xml_content"
      xpath => [ "/event/message/text()", "logmsg",
                 "/event/@logger", "Logger",
                 "/event/@timestamp", "Timestamp",
                 "/event/@level", "loglevel",
                 "/event/@thread", "Thread",
                 "/event/throwable/text()", "Exceptionthrowable",
                 "/event/NDC/text()", "NDCs",
                 "/event/properties/data/@name", "mdcname",
                 "/event/properties/data/@value", "mdcvalue" ]
    }
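
    # Note: each xpath expression yields an array of matches; "mdcname" and
    # "mdcvalue" end up as parallel arrays (one entry per <data> element),
    # which the ruby filter below flattens into individual event fields.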

    # Ruby filter to iterate over the MDC name/value arrays and promote each
    # pair to a top-level field on the event
    ruby {
      code => '
        names = event.get("[mdcname]")
        values = event.get("[mdcvalue]")
        if names and values
          i = 0
          while i < names.length do
            if names.at(i) and values.at(i)
              event.set(names.at(i), values.at(i))
            end
            i = i + 1
          end
        end
      '
    }
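
    # For illustration only (hypothetical values): mdcname => ["RequestId", "ServiceName"]
    # and mdcvalue => ["123", "aai"] become the event fields RequestId => "123"
    # and ServiceName => "aai".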

    # Validations
    if [Exceptionthrowable]
    {
      mutate {
        replace => {
          "exceptionmessage" => "%{[Exceptionthrowable]}"
        }
      }
    }

    if [NDCs]
    {
      mutate {
        replace => {
          "NDC" => "%{[NDCs]}"
        }
      }
    }

    mutate {
      replace => {
        "Logger" => "%{[Logger]}"
        "logmsg" => "%{[logmsg]}"
        "Timestamp" => "%{[Timestamp]}"
        "loglevel" => "%{[loglevel]}"
        "message" => "%{logmsg}"
        "Thread" => "%{[Thread]}"
      }
      remove_field => ["mdcname", "mdcvalue", "logmsg", "Exceptionthrowable", "NDCs"]
    }

    if [Timestamp]
    {
      date {
        match => ["Timestamp", "UNIX_MS"]
        target => "Timestamp"
      }
    }
  }
  # Filter for logback events
  else {

    #mutate { add_field => { "orgmsg" => "%{message}" } } # Copy of original msg for debugging

    # Normalize " = " to "=" and mark empty values as null so the kv filters
    # below can split key/value pairs cleanly.
    mutate {
      gsub => [
        'message', ' = ', '=',
        'message', '= ', '=null',
        'message', '=\t', '=null ', # This null is followed by a tab
        'message', '\t$', '\t'
      ]
    }
    # The grok below parses the message field for all current logback patterns used by OOM components.
    # Example logback pattern: %d{"yyyy-MM-dd'T'HH:mm:ss.SSSXXX", UTC}|%X{RequestId}|%msg
    # Example grok pattern: %{TIMESTAMP_ISO8601:Timestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:message}
    # Use the following command to find all logback patterns in the oom directory:
    #   find oom -name "logback*xml" -exec grep "property.*attern.*value" {} \; | sort | uniq
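    # For illustration only (hypothetical log line) matching the example grok pattern above:
    #   2018-04-06T18:39:22.123Z|123e4567-e89b-12d3-a456-426655440000|Processing request
    # would yield the Timestamp, RequestId and message fields.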
    grok {
      match => {
        "message" => [
          "%{TIMESTAMP_ISO8601:Timestamp}\\t[%{GREEDYDATA:Thread}]\\t%{GREEDYDATA:loglevel}\\t%{JAVACLASS:Logger}\\t%{GREEDYDATA:MDCs}\\t%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:BeginTimestamp}\|%{TIMESTAMP_ISO8601:EndTimestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:ServiceInstanceId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:Unknown1}\|%{GREEDYDATA:ServiceName}\|%{GREEDYDATA:PartnerName}\|%{GREEDYDATA:TargetEntity}\|%{GREEDYDATA:TargetServiceName}\|%{GREEDYDATA:StatusCode}\|%{GREEDYDATA:ResponseCode}\|%{GREEDYDATA:ResponseDesc}\|%{UUID:InstanceUUID}\|%{GREEDYDATA:loglevel}\|%{GREEDYDATA:AlertSeverity}\|%{IP:ServerIPAddress}\|%{GREEDYDATA:Timer}\|%{HOSTNAME:ServerFQDN}\|%{IPORHOST:RemoteHost}\|%{GREEDYDATA:Unknown2}\|%{GREEDYDATA:Unknown3}\|%{GREEDYDATA:Unknown4}\|%{GREEDYDATA:TargetVirtualEntity}\|%{GREEDYDATA:Unknown5}\|%{GREEDYDATA:Unknown6}\|%{GREEDYDATA:Unknown7}\|%{GREEDYDATA:Unknown8}\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:BeginTimestamp}\|%{TIMESTAMP_ISO8601:EndTimestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:ServiceInstanceId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:Unknown1}\|%{GREEDYDATA:ServiceName}\|%{GREEDYDATA:PartnerName}\|%{GREEDYDATA:StatusCode}\|%{GREEDYDATA:ResponseCode}\|%{GREEDYDATA:ResponseDesc}\|%{UUID:InstanceUUID}\|%{GREEDYDATA:loglevel}\|%{GREEDYDATA:AlertSeverity}\|%{IP:ServerIPAddress}\|%{GREEDYDATA:Timer}\|%{HOSTNAME:ServerFQDN}\|%{IPORHOST:RemoteHost}\|%{GREEDYDATA:Unknown2}\|%{GREEDYDATA:Unknown3}\|%{GREEDYDATA:Unknown4}\|%{GREEDYDATA:Unknown5}\|%{GREEDYDATA:Unknown6}\|%{GREEDYDATA:Unknown7}\|%{GREEDYDATA:Unknown8}\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:Timestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:ServiceInstanceId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:ServiceName}\|%{UUID:InstanceUUID}\|%{GREEDYDATA:loglevel}\|%{GREEDYDATA:AlertSeverity}\|%{IP:ServerIPAddress}\|%{HOSTNAME:ServerFQDN}\|%{IPORHOST:RemoteHost}\|%{GREEDYDATA:Timer}\|\[%{GREEDYDATA:caller}\]\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:Timestamp}\|%{GREEDYDATA:RequestId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:ServiceName}\|%{GREEDYDATA:PartnerName}\|%{GREEDYDATA:TargetEntity}\|%{GREEDYDATA:TargetServiceName}\|%{GREEDYDATA:loglevel}\|%{GREEDYDATA:ErrorCode}\|%{GREEDYDATA:ErrorDesc}\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:Timestamp}\|%{GREEDYDATA:RequestId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:ClassName}\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:Timestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:message}",
          "\[%{TIMESTAMP_ISO8601:Timestamp}\|%{LOGLEVEL:loglevel}\|%{GREEDYDATA:Logger}\|%{GREEDYDATA:Thread}\] %{GREEDYDATA:message}"
        ]
      }
      overwrite => ["message"]
    }
    # The MDCs are key=value pairs separated by "," or "\t". Extra space characters are trimmed from the keys and values.
    kv {
      source => "MDCs"
      field_split => ",\t"
      trim_key => "\s"
      trim_value => "\s"
      remove_field => [ "MDCs" ]
    }
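
    # For illustration only (hypothetical MDC string): an MDCs value of
    # "RequestId=123, InvocationId=456" becomes the fields RequestId => "123"
    # and InvocationId => "456".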

    # If Timestamp is missing but EndTimestamp is present, use EndTimestamp.
    if (![Timestamp] and [EndTimestamp]) {
      mutate { add_field => { "Timestamp" => "%{EndTimestamp}" } }
    }
    date {
      match => [ "Timestamp", "ISO8601", "yyyy-MM-dd HH:mm:ss,SSS" ]
      target => "Timestamp"
    }

    mutate {
      remove_field => ["DuplicateRequestID", "Unknown1", "Unknown2", "Unknown3", "Unknown4", "Unknown5", "Unknown6", "Unknown7", "Unknown8"]
    }

    if ([source] == "/var/log/onap/sdc/sdc-be/audit.log") {
      # Parse key=value pairs in the message
      kv {
        field_split => "\s"
        trim_key => "\s"
        trim_value => "\s"
      }

      # If RequestId is missing and DID is present, use DID as the RequestId
      if (![RequestId] and [DID] =~ /.+/) {
        mutate { add_field => { "RequestId" => "%{DID}" } }
      }
    }

  } # Close else statement for logback events
} # Close filter


output {
  elasticsearch {
    id => 'onap_es'

    ######### Security configurations #########

    user => "elastic"
    password => "changeme"

    ## The .cer or .pem file used to validate the server's certificate.
    #cacert => $es_cacert

    ## The keystore used to present a certificate to the server. It can be either .jks or .p12.
    #keystore => $es_keystore
    #keystore_password => $es_keystore_password

    ## Enable SSL/TLS secured communication to the Elasticsearch cluster.
    ## Default is not set, in which case it depends on the protocol specified in the hosts list.
    #ssl => $es_ssl

    ## Option to validate the server's certificate. Default is true.
    #ssl_certificate_verification => $es_ssl_certificate_verification

    ## The JKS truststore used to validate the server's certificate.
    #truststore => $es_truststore
    #truststore_password => $es_truststore_password


    ######### Elasticsearch cluster and host configurations #########

    ## Can specify a single host or a list of hosts. If sniffing is enabled, one is enough and the others will be auto-discovered.
    hosts => ["http://{{.Values.config.elasticsearchServiceName}}.{{.Release.Namespace}}:{{.Values.config.elasticsearchPort}}"]


    ## This setting asks Elasticsearch for the list of all cluster nodes and adds them to the hosts list. Default is false.
    sniffing => true

    ## How long to wait, in seconds, between sniffing attempts. Default is 5 seconds.
    #sniffing_delay => 5

    ## Set the address of a forward HTTP proxy.
    #proxy => $es_proxy

    ## Use this if you must run Elasticsearch behind a proxy that remaps the root path where the Elasticsearch HTTP API lives.
    #path => $es_path

    ######### Elasticsearch request configurations #########

    ## This setting defines the maximum size of the bulk requests Logstash will make.
    #flush_size => ?

    ######### Document configurations #########

    index => "logstash-%{+YYYY.MM.dd}"
    document_type => "logs"

    ## This can be used to associate child documents with a parent using the parent ID.
    #parent => "abcd"
  }
}