input {
  beats {

    ## Add an id to the plugin configuration. Can be anything unique.
    id => 'beats_plugin'

    ######## Connection configurations ########

    ## The port to listen on.
    port => {{.Values.service.externalPort}}

    ## Close idle clients after the specified time in seconds. Default is 60 seconds.
    #client_inactivity_timeout => 60

    ######## Security configurations ########

    ## Enable encryption. Default false.
    #ssl => $filebeat_ssl

    ## SSL certificate path.
    #ssl_certificate => $filebeat_ssl_certificate

    ## SSL key to use.
    #ssl_key => $filebeat_ssl_key

    ## SSL key passphrase to use.
    #ssl_key_passphrase => $filebeat_ssl_key_passphrase

    ## Value can be any of: none, peer, force_peer.
    #ssl_verify_mode => $filebeat_ssl_verify_mode

    ## Time in milliseconds for an incomplete ssl handshake to timeout. Default is 10000 ms.
    #ssl_handshake_timeout => 10000

    include_codec_tag => false
  }
}

filter {
  grok {
    break_on_match => false
    match => {
      "source" => ["/var/log/onap/(?<componentName>[^/]+)/",
                   "/var/log/onap/%{GREEDYDATA:componentLogFile}"
                  ]
    }
  }

  # Filter for log4j xml events
  if "</log4j:event>" in [message] {

    #mutate { add_field => { "orgmsg_log4j" => "%{message}" } } # Copy of original msg for debug

    # Filter to parse the xml event and retrieve data
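    # Illustrative only (hypothetical values, not taken from actual ONAP logs): a log4j XMLLayout
    # event typically looks like the snippet below; the xpath expressions in the xml filter extract
    # fields from its attributes and child elements (logger, timestamp, level, thread, message,
    # throwable, NDC, and MDC data).
    #   <log4j:event logger="org.onap.example.Handler" timestamp="1523020762000" level="INFO" thread="main">
    #     <log4j:message>Processing request</log4j:message>
    #     <log4j:NDC>some-context</log4j:NDC>
    #     <log4j:properties>
    #       <log4j:data name="RequestId" value="c45b8b26-0001-0002-0003-aabbccddeeff"/>
    #     </log4j:properties>
    #   </log4j:event>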
    xml {
      source => "message"
      store_xml => false
      remove_namespaces => true
      target => "xml_content"
      xpath => [ "/event/message/text()", "logmsg",
                 "/event/@logger", "Logger",
                 "/event/@timestamp", "Timestamp",
                 "/event/@level", "loglevel",
                 "/event/@thread", "Thread",
                 "/event/throwable/text()", "Exceptionthrowable",
                 "/event/NDC/text()", "NDCs",
                 "/event/properties/data/@name", "mdcname",
                 "/event/properties/data/@value", "mdcvalue" ]
    }

    # Ruby filter to iterate over the MDCs and copy them onto the event as individual fields
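    # Illustrative example (hypothetical values): mdcname ["RequestId", "InvocationID"] and
    # mdcvalue ["abc-123", "def-456"] become the top-level fields RequestId => "abc-123" and
    # InvocationID => "def-456" on the event.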
    ruby {
      code => '
        names = event.get("[mdcname]")
        values = event.get("[mdcvalue]")
        if names and values
          names.each_with_index do |name, i|
            event.set(name, values[i]) if name and values[i]
          end
        end
      '
    }

    # Validations
    if [Exceptionthrowable] {
      mutate {
        replace => {
          "exceptionmessage" => "%{[Exceptionthrowable]}"
        }
      }
    }

    if [NDCs] {
      mutate {
        replace => {
          "NDC" => "%{[NDCs]}"
        }
      }
    }

    mutate {
      replace => {
        "Logger"    => "%{[Logger]}"
        "logmsg"    => "%{[logmsg]}"
        "Timestamp" => "%{[Timestamp]}"
        "loglevel"  => "%{[loglevel]}"
        "message"   => "%{logmsg}"
        "Thread"    => "%{[Thread]}"
      }
      remove_field => ["mdcname", "mdcvalue", "logmsg", "Exceptionthrowable", "NDCs"]
    }

    if [Timestamp] {
      date {
        match => ["Timestamp", "UNIX_MS"]
        target => "Timestamp"
      }
    }
  }
  # Filter for logback events
  else {

    #mutate { add_field => { "orgmsg" => "%{message}" } } # Copy of original msg for debug

    mutate {
      gsub => [
        'message', ' = ', '=',
        'message', '= ', '=null',
        'message', '=\t', '=null ', #This null is followed by a tab
        'message', '\t$', '\t'
      ]
    }

    # The grok below parses the message field for all current logback patterns used by oom components.
    # Example logback pattern: %d{"yyyy-MM-dd'T'HH:mm:ss.SSSXXX", UTC}|%X{RequestId}|%msg
    # Example grok pattern:    %{TIMESTAMP_ISO8601:Timestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:message}
    # Use the following command to find all logback patterns in the oom directory:
    #   find oom -name "logback*xml" -exec grep "property.*attern.*value" {} \; | sort | uniq
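    # Illustrative only (hypothetical log line, not from a real component): a message such as
    #   2018-04-06T12:00:00.123+00:00|c45b8b26-0001-0002-0003-aabbccddeeff|Starting health check
    # matches the simple "%{TIMESTAMP_ISO8601:Timestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:message}"
    # pattern below and yields the fields Timestamp, RequestId and message.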
    grok {
      match => {
        "message" => [
          "%{TIMESTAMP_ISO8601:Timestamp}\\t[%{GREEDYDATA:Thread}]\\t%{GREEDYDATA:loglevel}\\t%{JAVACLASS:Logger}\\t%{GREEDYDATA:MDCs}\\t%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:BeginTimestamp}\|%{TIMESTAMP_ISO8601:EndTimestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:ServiceInstanceId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:Unknown1}\|%{GREEDYDATA:ServiceName}\|%{GREEDYDATA:PartnerName}\|%{GREEDYDATA:TargetEntity}\|%{GREEDYDATA:TargetServiceName}\|%{GREEDYDATA:StatusCode}\|%{GREEDYDATA:ResponseCode}\|%{GREEDYDATA:ResponseDesc}\|%{UUID:InstanceUUID}\|%{GREEDYDATA:loglevel}\|%{GREEDYDATA:AlertSeverity}\|%{IP:ServerIPAddress}\|%{GREEDYDATA:Timer}\|%{HOSTNAME:ServerFQDN}\|%{IPORHOST:RemoteHost}\|%{GREEDYDATA:Unknown2}\|%{GREEDYDATA:Unknown3}\|%{GREEDYDATA:Unknown4}\|%{GREEDYDATA:TargetVirtualEntity}\|%{GREEDYDATA:Unknown5}\|%{GREEDYDATA:Unknown6}\|%{GREEDYDATA:Unknown7}\|%{GREEDYDATA:Unknown8}\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:BeginTimestamp}\|%{TIMESTAMP_ISO8601:EndTimestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:ServiceInstanceId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:Unknown1}\|%{GREEDYDATA:ServiceName}\|%{GREEDYDATA:PartnerName}\|%{GREEDYDATA:StatusCode}\|%{GREEDYDATA:ResponseCode}\|%{GREEDYDATA:ResponseDesc}\|%{UUID:InstanceUUID}\|%{GREEDYDATA:loglevel}\|%{GREEDYDATA:AlertSeverity}\|%{IP:ServerIPAddress}\|%{GREEDYDATA:Timer}\|%{HOSTNAME:ServerFQDN}\|%{IPORHOST:RemoteHost}\|%{GREEDYDATA:Unknown2}\|%{GREEDYDATA:Unknown3}\|%{GREEDYDATA:Unknown4}\|%{GREEDYDATA:Unknown5}\|%{GREEDYDATA:Unknown6}\|%{GREEDYDATA:Unknown7}\|%{GREEDYDATA:Unknown8}\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:Timestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:ServiceInstanceId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:ServiceName}\|%{UUID:InstanceUUID}\|%{GREEDYDATA:loglevel}\|%{GREEDYDATA:AlertSeverity}\|%{IP:ServerIPAddress}\|%{HOSTNAME:ServerFQDN}\|%{IPORHOST:RemoteHost}\|%{GREEDYDATA:Timer}\|\[%{GREEDYDATA:caller}\]\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:Timestamp}\|%{GREEDYDATA:RequestId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:ServiceName}\|%{GREEDYDATA:PartnerName}\|%{GREEDYDATA:TargetEntity}\|%{GREEDYDATA:TargetServiceName}\|%{GREEDYDATA:loglevel}\|%{GREEDYDATA:ErrorCode}\|%{GREEDYDATA:ErrorDesc}\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:Timestamp}\|%{GREEDYDATA:RequestId}\|%{GREEDYDATA:Thread}\|%{GREEDYDATA:ClassName}\|%{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:Timestamp}\|%{UUID:RequestId}\|%{GREEDYDATA:message}",
          "\[%{TIMESTAMP_ISO8601:Timestamp}\|%{LOGLEVEL:loglevel}\|%{GREEDYDATA:Logger}\|%{GREEDYDATA:Thread}\] %{GREEDYDATA:message}"
        ]
      }
      overwrite => ["message"]
    }

    # The MDCs are key/value pairs separated by "," or "\t". Extra space characters are trimmed from the keys and values.
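    # Illustrative only (hypothetical MDC string): "RequestID=abc-123, InvocationID=def-456"
    # would be split by the kv filter below into the fields RequestID => "abc-123" and
    # InvocationID => "def-456", with surrounding whitespace trimmed.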
    kv {
      source => "MDCs"
      field_split => ",\t"
      trim_key => "\s"
      trim_value => "\s"
      remove_field => [ "MDCs" ]
    }

    if (![Timestamp] and [EndTimestamp]) {
      mutate { add_field => { "Timestamp" => "%{EndTimestamp}" } }
    }

    date {
      match => [ "Timestamp", "ISO8601", "yyyy-MM-dd HH:mm:ss,SSS" ]
      target => "Timestamp"
    }

    mutate {
      remove_field => ["DuplicateRequestID", "Unknown1", "Unknown2", "Unknown3", "Unknown4", "Unknown5", "Unknown6", "Unknown7", "Unknown8"]
    }

    if ([source] == "/var/log/onap/sdc/sdc-be/audit.log") {
      # Parse key/value pairs in the message
      kv {
        field_split => "\s"
        trim_key => "\s"
        trim_value => "\s"
      }

      # If RequestId is missing and DID is present, use DID as the RequestId
      if (![RequestId] and [DID] =~ /.+/) {
        mutate { add_field => { "RequestId" => "%{DID}" } }
      }
    }

  } #Close else statement for logback events
} #Close filter


output {
  elasticsearch {
    id => 'onap_es'

    ######### Security configurations #########

    user => "elastic"
    password => "changeme"

    ## The .cer or .pem file to validate the server's certificate
    #cacert => $es_cacert

    ## The keystore used to present a certificate to the server. It can be either .jks or .p12
    #keystore => $es_keystore
    #keystore_password => $es_keystore_password

    ## Enable SSL/TLS secured communication to the Elasticsearch cluster.
    ## Default is not set, in which case it depends on the protocol specified in the hosts list.
    #ssl => $es_ssl

    ## Option to validate the server's certificate. Default is true
    #ssl_certificate_verification => $es_ssl_certificate_verification

    ## The JKS truststore to validate the server's certificate.
    #truststore => $es_truststore
    #truststore_password => $es_truststore_password

| 229 | ######### Elasticsearchcluster and host configurations ######### |
| 230 | |
BorislavG | 5f3b619 | 2018-03-25 18:12:38 +0300 | [diff] [blame] | 231 | ##can specify one or a list of hosts. If sniffing is set, one is enough and others will be auto-discovered |
| 232 | hosts => ["http://{{.Values.config.elasticsearchServiceName}}.{{.Release.Namespace}}:{{.Values.config.elasticsearchPort}}"] |
Itay Hassid | e2da86d | 2017-08-24 12:54:42 +0000 | [diff] [blame] | 233 | |
| 234 | |
| 235 | ## This setting asks Elasticsearch for the list of all cluster nodes and adds them to the hosts list. Default is false. |
| 236 | sniffing => true |
| 237 | |
| 238 | ## How long to wait, in seconds, between sniffing attempts. Default is 5 seconds. |
| 239 | #sniffing_delay => 5 |
| 240 | |
| 241 | ## Set the address of a forward HTTP proxy. |
| 242 | #proxy => $es_proxy |
| 243 | |
| 244 | ##Use this if you must run Elasticsearch behind a proxy that remaps the root path for the Elasticsearch HTTP API lives |
| 245 | #path => $es_path |
| 246 | |
| 247 | ######### Elasticsearch request configurations ######### |
| 248 | |
| 249 | ## This setting defines the maximum sized bulk request Logstash will make. |
| 250 | #flush_size => ? |
| 251 | |
| 252 | ######### Document configurations ######### |
| 253 | |
shanedaniel | 1cdda18 | 2017-11-17 18:21:00 +0000 | [diff] [blame] | 254 | index => "logstash-%{+YYYY.MM.dd}" |
Itay Hassid | e2da86d | 2017-08-24 12:54:42 +0000 | [diff] [blame] | 255 | document_type => "logs" |
| 256 | |
| 257 | ## This can be used to associate child documents with a parent using the parent ID. |
| 258 | #parent => "abcd' |
| 259 | } |
| 260 | } |
| 261 | |